From c6d7c9025fae01c889023a26a22582cde00fa204 Mon Sep 17 00:00:00 2001 From: Christian Stewart Date: Thu, 7 Aug 2025 14:01:09 -0700 Subject: [PATCH] WIP Signed-off-by: Christian Stewart --- compiler/expr-selector.go | 12 + compliance/WIP.md | 33 + compliance/deps/go/ast/ast.gs.ts | 4881 +++++++++++++++++ compliance/deps/go/ast/commentmap.gs.ts | 507 ++ compliance/deps/go/ast/filter.gs.ts | 767 +++ compliance/deps/go/ast/import.gs.ts | 418 ++ compliance/deps/go/ast/index.ts | 17 + compliance/deps/go/ast/print.gs.ts | 392 ++ compliance/deps/go/ast/resolve.gs.ts | 298 + compliance/deps/go/ast/scope.gs.ts | 282 + compliance/deps/go/ast/walk.gs.ts | 408 ++ .../deps/go/build/constraint/expr.gs.ts | 982 ++++ compliance/deps/go/build/constraint/index.ts | 4 + .../deps/go/build/constraint/vers.gs.ts | 101 + compliance/deps/go/parser/index.ts | 2 + compliance/deps/go/parser/interface.gs.ts | 297 + compliance/deps/go/parser/parser.gs.ts | 3658 ++++++++++++ compliance/deps/go/parser/resolver.gs.ts | 856 +++ compliance/deps/go/scanner/scanner.gs.ts | 2 +- compliance/deps/go/token/position.gs.ts | 2 +- .../tests/await_selector_on_call/expected.log | 1 + .../tests/await_selector_on_call/index.ts | 2 + .../tests/await_selector_on_call/main.go | 20 + .../tests/await_selector_on_call/main.gs.ts | 55 + .../await_selector_on_call/tsconfig.json | 32 + .../package_import_go_parser.gs.ts | 8 +- .../tests/package_import_go_parser/skip-test | 25 - 27 files changed, 14031 insertions(+), 31 deletions(-) create mode 100644 compliance/WIP.md create mode 100644 compliance/deps/go/ast/ast.gs.ts create mode 100644 compliance/deps/go/ast/commentmap.gs.ts create mode 100644 compliance/deps/go/ast/filter.gs.ts create mode 100644 compliance/deps/go/ast/import.gs.ts create mode 100644 compliance/deps/go/ast/index.ts create mode 100644 compliance/deps/go/ast/print.gs.ts create mode 100644 compliance/deps/go/ast/resolve.gs.ts create mode 100644 compliance/deps/go/ast/scope.gs.ts create mode 100644 compliance/deps/go/ast/walk.gs.ts create mode 100644 compliance/deps/go/build/constraint/expr.gs.ts create mode 100644 compliance/deps/go/build/constraint/index.ts create mode 100644 compliance/deps/go/build/constraint/vers.gs.ts create mode 100644 compliance/deps/go/parser/index.ts create mode 100644 compliance/deps/go/parser/interface.gs.ts create mode 100644 compliance/deps/go/parser/parser.gs.ts create mode 100644 compliance/deps/go/parser/resolver.gs.ts create mode 100644 compliance/tests/await_selector_on_call/expected.log create mode 100644 compliance/tests/await_selector_on_call/index.ts create mode 100644 compliance/tests/await_selector_on_call/main.go create mode 100644 compliance/tests/await_selector_on_call/main.gs.ts create mode 100644 compliance/tests/await_selector_on_call/tsconfig.json delete mode 100644 compliance/tests/package_import_go_parser/skip-test diff --git a/compiler/expr-selector.go b/compiler/expr-selector.go index d1542197..007c4198 100644 --- a/compiler/expr-selector.go +++ b/compiler/expr-selector.go @@ -122,10 +122,22 @@ func (c *GoToTSCompiler) WriteSelectorExpr(exp *ast.SelectorExpr) error { // Fallback / Normal Case (e.g., obj.Field, pkg.Var, method calls) // WriteValueExpr handles adding .value for the base variable itself if it's varrefed. + // If the base is a call expression, wrap it in parentheses to ensure correct precedence + // especially when the call may be prefixed with `await`. 
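+ // For example (see compliance/WIP.md), an awaited call base emitted without
+ // grouping, `await fset!.PositionFor(pos, false)!.Line`, is parsed by TypeScript
+ // as `await (call()!.Line)` because `await` binds more loosely than member access;
+ // with the parentheses added below it becomes `(await fset!.PositionFor(pos, false))!.Line`,
+ // which awaits the Promise before the selector is applied.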
+ needParens := false + if _, isCall := exp.X.(*ast.CallExpr); isCall { + needParens = true + c.tsw.WriteLiterally("(") + } + if err := c.WriteValueExpr(exp.X); err != nil { return fmt.Errorf("failed to write selector base expression: %w", err) } + if needParens { + c.tsw.WriteLiterally(")") + } + // Add null assertion for selector expressions when accessing fields/methods on nullable types // In Go, accessing fields or calling methods on nil pointers/interfaces panics, so we should throw in TypeScript baseType := c.pkg.TypesInfo.TypeOf(exp.X) diff --git a/compliance/WIP.md b/compliance/WIP.md new file mode 100644 index 00000000..1411b172 --- /dev/null +++ b/compliance/WIP.md @@ -0,0 +1,33 @@ +# Work In Progress + +- Target test: `package_import_go_parser` +- Observed: Hundreds of TS errors cascade from generated deps when importing `go/parser`. +- Most critical issue: member access on awaited call results is emitted with wrong precedence. Example from generated code: + + `await fset!.PositionFor(pos, false)!.Line` + + This tries to access `.Line` on a Promise before awaiting, leading to errors like: + - Property 'Line' does not exist on type 'Promise' + - 'await' expressions only allowed within async functions + + Correct emission should be: + + `(await fset!.PositionFor(pos, false))!.Line` + + i.e., property access needs to occur after awaiting the Promise result. + +- Root cause: + - `WriteSelectorExpr` writes selectors by first emitting the base expression via `WriteValueExpr(exp.X)` and then appending `!.` or `.`. When the base is a call expression that `WriteCallExpr` prefixes with `await`, we end up with `await call()!.prop`. In TypeScript, `await` has lower precedence than member access, so this parses as `await (call()!.prop)` instead of `(await call())!.prop`. + +- Minimal reproducer (no go/parser): + - A function `F()` that becomes async (e.g., contains a channel send), returns `*S` where `S` has a field `V int`. + - Using `F().V` in `main()` triggers selector on a call-expression base. The current compiler generates `await F()!.V` which is wrong; after fix it should be `(await F())!.V`. + +- Planned change: + - In `compiler/expr-selector.go`, detect when the selector base `exp.X` is an `*ast.CallExpr`. Wrap it in parentheses when emitting the base, so that any `await` prefix produced by `WriteCallExpr` is properly grouped: `()!.`. + - This is a general, non-test-specific fix and safe even when the call is not async. + +- Implementation steps: + 1. Add parens around call-expression bases in `WriteSelectorExpr` normal path. + 2. Create new compliance test `await_selector_on_call` that reproduces the issue without importing external packages. + 3. Run the new test, iterate until green. diff --git a/compliance/deps/go/ast/ast.gs.ts b/compliance/deps/go/ast/ast.gs.ts new file mode 100644 index 00000000..49388ee6 --- /dev/null +++ b/compliance/deps/go/ast/ast.gs.ts @@ -0,0 +1,4881 @@ +import * as $ from "@goscript/builtin/index.js" +import { Object, Scope } from "./scope.gs.js"; + +import * as token from "@goscript/go/token/index.js" + +import * as strings from "@goscript/strings/index.js" + +export let SEND: ChanDir = (1 << 0) + +export let RECV: ChanDir = 0 + +// A BadDecl node is a placeholder for a declaration containing +// syntax errors for which a correct declaration node cannot be +// created. 
+// +export class BadDecl { + // position range of bad declaration + public get From(): token.Pos { + return this._fields.From.value + } + public set From(value: token.Pos) { + this._fields.From.value = value + } + + // position range of bad declaration + public get To(): token.Pos { + return this._fields.To.value + } + public set To(value: token.Pos) { + this._fields.To.value = value + } + + public _fields: { + From: $.VarRef; + To: $.VarRef; + } + + constructor(init?: Partial<{From?: token.Pos, To?: token.Pos}>) { + this._fields = { + From: $.varRef(init?.From ?? 0 as token.Pos), + To: $.varRef(init?.To ?? 0 as token.Pos) + } + } + + public clone(): BadDecl { + const cloned = new BadDecl() + cloned._fields = { + From: $.varRef(this._fields.From.value), + To: $.varRef(this._fields.To.value) + } + return cloned + } + + public Pos(): token.Pos { + const d = this + return d.From + } + + public End(): token.Pos { + const d = this + return d.To + } + + // declNode() ensures that only declaration nodes can be + // assigned to a Decl. + public declNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'BadDecl', + new BadDecl(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "declNode", args: [], returns: [] }], + BadDecl, + {"From": "Pos", "To": "Pos"} + ); +} + +// A BadExpr node is a placeholder for an expression containing +// syntax errors for which a correct expression node cannot be +// created. +// +export class BadExpr { + // position range of bad expression + public get From(): token.Pos { + return this._fields.From.value + } + public set From(value: token.Pos) { + this._fields.From.value = value + } + + // position range of bad expression + public get To(): token.Pos { + return this._fields.To.value + } + public set To(value: token.Pos) { + this._fields.To.value = value + } + + public _fields: { + From: $.VarRef; + To: $.VarRef; + } + + constructor(init?: Partial<{From?: token.Pos, To?: token.Pos}>) { + this._fields = { + From: $.varRef(init?.From ?? 0 as token.Pos), + To: $.varRef(init?.To ?? 0 as token.Pos) + } + } + + public clone(): BadExpr { + const cloned = new BadExpr() + cloned._fields = { + From: $.varRef(this._fields.From.value), + To: $.varRef(this._fields.To.value) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.From + } + + public End(): token.Pos { + const x = this + return x.To + } + + // exprNode() ensures that only expression/type nodes can be + // assigned to an Expr. + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'BadExpr', + new BadExpr(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + BadExpr, + {"From": "Pos", "To": "Pos"} + ); +} + +// A BadStmt node is a placeholder for statements containing +// syntax errors for which no correct statement nodes can be +// created. 
+// +export class BadStmt { + // position range of bad statement + public get From(): token.Pos { + return this._fields.From.value + } + public set From(value: token.Pos) { + this._fields.From.value = value + } + + // position range of bad statement + public get To(): token.Pos { + return this._fields.To.value + } + public set To(value: token.Pos) { + this._fields.To.value = value + } + + public _fields: { + From: $.VarRef; + To: $.VarRef; + } + + constructor(init?: Partial<{From?: token.Pos, To?: token.Pos}>) { + this._fields = { + From: $.varRef(init?.From ?? 0 as token.Pos), + To: $.varRef(init?.To ?? 0 as token.Pos) + } + } + + public clone(): BadStmt { + const cloned = new BadStmt() + cloned._fields = { + From: $.varRef(this._fields.From.value), + To: $.varRef(this._fields.To.value) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.From + } + + public End(): token.Pos { + const s = this + return s.To + } + + // stmtNode() ensures that only statement nodes can be + // assigned to a Stmt. + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'BadStmt', + new BadStmt(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + BadStmt, + {"From": "Pos", "To": "Pos"} + ); +} + +// A BasicLit node represents a literal of basic type. +// +// Note that for the CHAR and STRING kinds, the literal is stored +// with its quotes. For example, for a double-quoted STRING, the +// first and the last rune in the Value field will be ". The +// [strconv.Unquote] and [strconv.UnquoteChar] functions can be +// used to unquote STRING and CHAR values, respectively. +// +// For raw string literals (Kind == token.STRING && Value[0] == '`'), +// the Value field contains the string text without carriage returns (\r) that +// may have been present in the source. Because the end position is +// computed using len(Value), the position reported by [BasicLit.End] does not match the +// true source end position for raw string literals containing carriage returns. +export class BasicLit { + // literal position + public get ValuePos(): token.Pos { + return this._fields.ValuePos.value + } + public set ValuePos(value: token.Pos) { + this._fields.ValuePos.value = value + } + + // token.INT, token.FLOAT, token.IMAG, token.CHAR, or token.STRING + public get Kind(): token.Token { + return this._fields.Kind.value + } + public set Kind(value: token.Token) { + this._fields.Kind.value = value + } + + // literal string; e.g. 42, 0x7f, 3.14, 1e-9, 2.4i, 'a', '\x7f', "foo" or `\m\n\o` + public get Value(): string { + return this._fields.Value.value + } + public set Value(value: string) { + this._fields.Value.value = value + } + + public _fields: { + ValuePos: $.VarRef; + Kind: $.VarRef; + Value: $.VarRef; + } + + constructor(init?: Partial<{Kind?: token.Token, Value?: string, ValuePos?: token.Pos}>) { + this._fields = { + ValuePos: $.varRef(init?.ValuePos ?? 0 as token.Pos), + Kind: $.varRef(init?.Kind ?? 0 as token.Token), + Value: $.varRef(init?.Value ?? 
"") + } + } + + public clone(): BasicLit { + const cloned = new BasicLit() + cloned._fields = { + ValuePos: $.varRef(this._fields.ValuePos.value), + Kind: $.varRef(this._fields.Kind.value), + Value: $.varRef(this._fields.Value.value) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.ValuePos + } + + public End(): token.Pos { + const x = this + return (x.ValuePos + $.len(x.Value) as token.Pos) + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'BasicLit', + new BasicLit(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + BasicLit, + {"ValuePos": "Pos", "Kind": "Token", "Value": { kind: $.TypeKind.Basic, name: "string" }} + ); +} + +// A BranchStmt node represents a break, continue, goto, +// or fallthrough statement. +// +export class BranchStmt { + // position of Tok + public get TokPos(): token.Pos { + return this._fields.TokPos.value + } + public set TokPos(value: token.Pos) { + this._fields.TokPos.value = value + } + + // keyword token (BREAK, CONTINUE, GOTO, FALLTHROUGH) + public get Tok(): token.Token { + return this._fields.Tok.value + } + public set Tok(value: token.Token) { + this._fields.Tok.value = value + } + + // label name; or nil + public get Label(): Ident | null { + return this._fields.Label.value + } + public set Label(value: Ident | null) { + this._fields.Label.value = value + } + + public _fields: { + TokPos: $.VarRef; + Tok: $.VarRef; + Label: $.VarRef; + } + + constructor(init?: Partial<{Label?: Ident | null, Tok?: token.Token, TokPos?: token.Pos}>) { + this._fields = { + TokPos: $.varRef(init?.TokPos ?? 0 as token.Pos), + Tok: $.varRef(init?.Tok ?? 0 as token.Token), + Label: $.varRef(init?.Label ?? null) + } + } + + public clone(): BranchStmt { + const cloned = new BranchStmt() + cloned._fields = { + TokPos: $.varRef(this._fields.TokPos.value), + Tok: $.varRef(this._fields.Tok.value), + Label: $.varRef(this._fields.Label.value ? $.markAsStructValue(this._fields.Label.value.clone()) : null) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.TokPos + } + + public End(): token.Pos { + const s = this + if (s.Label != null) { + return s.Label!.End() + } + return (s.TokPos + $.len(token.Token_String(s.Tok)) as token.Pos) + } + + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'BranchStmt', + new BranchStmt(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + BranchStmt, + {"TokPos": "Pos", "Tok": "Token", "Label": { kind: $.TypeKind.Pointer, elemType: "Ident" }} + ); +} + +export type ChanDir = number; + +export class Comment { + // position of "/" starting the comment + public get Slash(): token.Pos { + return this._fields.Slash.value + } + public set Slash(value: token.Pos) { + this._fields.Slash.value = value + } + + // comment text (excluding '\n' for //-style comments) + public get Text(): string { + return this._fields.Text.value + } + public set Text(value: string) { + this._fields.Text.value = value + } + + public _fields: { + Slash: $.VarRef; + Text: $.VarRef; + } + + constructor(init?: Partial<{Slash?: token.Pos, Text?: string}>) { + this._fields = { + Slash: $.varRef(init?.Slash ?? 
0 as token.Pos), + Text: $.varRef(init?.Text ?? "") + } + } + + public clone(): Comment { + const cloned = new Comment() + cloned._fields = { + Slash: $.varRef(this._fields.Slash.value), + Text: $.varRef(this._fields.Text.value) + } + return cloned + } + + public Pos(): token.Pos { + const c = this + return c.Slash + } + + public End(): token.Pos { + const c = this + return (c.Slash + $.len(c.Text) as token.Pos) + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'Comment', + new Comment(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }], + Comment, + {"Slash": "Pos", "Text": { kind: $.TypeKind.Basic, name: "string" }} + ); +} + +export class CommentGroup { + // len(List) > 0 + public get List(): $.Slice { + return this._fields.List.value + } + public set List(value: $.Slice) { + this._fields.List.value = value + } + + public _fields: { + List: $.VarRef<$.Slice>; + } + + constructor(init?: Partial<{List?: $.Slice}>) { + this._fields = { + List: $.varRef(init?.List ?? null) + } + } + + public clone(): CommentGroup { + const cloned = new CommentGroup() + cloned._fields = { + List: $.varRef(this._fields.List.value) + } + return cloned + } + + public Pos(): token.Pos { + const g = this + return g.List![0]!.Pos() + } + + public End(): token.Pos { + const g = this + return g.List![$.len(g.List) - 1]!.End() + } + + // Text returns the text of the comment. + // Comment markers (//, /*, and */), the first space of a line comment, and + // leading and trailing empty lines are removed. + // Comment directives like "//line" and "//go:noinline" are also removed. + // Multiple empty lines are reduced to one, and trailing space on lines is trimmed. + // Unless the result is empty, it is newline-terminated. + public Text(): string { + const g = this + if (g == null) { + return "" + } + let comments = $.makeSlice($.len(g.List), undefined, 'string') + for (let i = 0; i < $.len(g.List); i++) { + const c = g.List![i] + { + comments![i] = c!.Text + } + } + let lines = $.makeSlice(0, 10, 'string') // most comments are less than 10 lines + for (let _i = 0; _i < $.len(comments); _i++) { + const c = comments![_i] + { + // Remove comment markers. + // The parser has given us exactly the comment text. + + //-style comment (no newline at the end) + + // empty line + + // strip first space - required for Example tests + + // Ignore //go:noinline, //line, and so on. + + /*-style comment */ + switch ($.indexString(c, 1)) { + case 47: + c = $.sliceString(c, 2, undefined) + if ($.len(c) == 0) { + // empty line + break + } + if ($.indexString(c, 0) == 32) { + // strip first space - required for Example tests + c = $.sliceString(c, 1, undefined) + break + } + if (isDirective(c)) { + // Ignore //go:noinline, //line, and so on. + continue + } + break + case 42: + c = $.sliceString(c, 2, $.len(c) - 2) + break + } + + // Split on newlines. + let cl = strings.Split(c, "\n") + + // Walk lines, stripping trailing white space and adding to list. 
+ for (let _i = 0; _i < $.len(cl); _i++) { + const l = cl![_i] + { + lines = $.append(lines, stripTrailingWhitespace(l)) + } + } + } + } + let n = 0 + for (let _i = 0; _i < $.len(lines); _i++) { + const line = lines![_i] + { + if (line != "" || n > 0 && lines![n - 1] != "") { + lines![n] = line + n++ + } + } + } + lines = $.goSlice(lines, 0, n) + if (n > 0 && lines![n - 1] != "") { + lines = $.append(lines, "") + } + return strings.Join(lines, "\n") + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'CommentGroup', + new CommentGroup(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "Text", args: [], returns: [{ type: { kind: $.TypeKind.Basic, name: "string" } }] }], + CommentGroup, + {"List": { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Pointer, elemType: "Comment" } }} + ); +} + +export type Decl = null | { + declNode(): void +} & Node + +$.registerInterfaceType( + 'Decl', + null, // Zero value for interface is null + [{ name: "declNode", args: [], returns: [] }] +); + +// A DeferStmt node represents a defer statement. +export class DeferStmt { + // position of "defer" keyword + public get Defer(): token.Pos { + return this._fields.Defer.value + } + public set Defer(value: token.Pos) { + this._fields.Defer.value = value + } + + public get Call(): CallExpr | null { + return this._fields.Call.value + } + public set Call(value: CallExpr | null) { + this._fields.Call.value = value + } + + public _fields: { + Defer: $.VarRef; + Call: $.VarRef; + } + + constructor(init?: Partial<{Call?: CallExpr | null, Defer?: token.Pos}>) { + this._fields = { + Defer: $.varRef(init?.Defer ?? 0 as token.Pos), + Call: $.varRef(init?.Call ?? null) + } + } + + public clone(): DeferStmt { + const cloned = new DeferStmt() + cloned._fields = { + Defer: $.varRef(this._fields.Defer.value), + Call: $.varRef(this._fields.Call.value ? $.markAsStructValue(this._fields.Call.value.clone()) : null) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.Defer + } + + public End(): token.Pos { + const s = this + return s.Call!.End() + } + + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'DeferStmt', + new DeferStmt(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + DeferStmt, + {"Defer": "Pos", "Call": { kind: $.TypeKind.Pointer, elemType: "CallExpr" }} + ); +} + +// An EmptyStmt node represents an empty statement. +// The "position" of the empty statement is the position +// of the immediately following (explicit or implicit) semicolon. +// +export class EmptyStmt { + // position of following ";" + public get Semicolon(): token.Pos { + return this._fields.Semicolon.value + } + public set Semicolon(value: token.Pos) { + this._fields.Semicolon.value = value + } + + // if set, ";" was omitted in the source + public get Implicit(): boolean { + return this._fields.Implicit.value + } + public set Implicit(value: boolean) { + this._fields.Implicit.value = value + } + + public _fields: { + Semicolon: $.VarRef; + Implicit: $.VarRef; + } + + constructor(init?: Partial<{Implicit?: boolean, Semicolon?: token.Pos}>) { + this._fields = { + Semicolon: $.varRef(init?.Semicolon ?? 0 as token.Pos), + Implicit: $.varRef(init?.Implicit ?? 
false) + } + } + + public clone(): EmptyStmt { + const cloned = new EmptyStmt() + cloned._fields = { + Semicolon: $.varRef(this._fields.Semicolon.value), + Implicit: $.varRef(this._fields.Implicit.value) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.Semicolon + } + + public End(): token.Pos { + const s = this + if (s.Implicit) { + return s.Semicolon + } + return s.Semicolon + 1 + } + + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'EmptyStmt', + new EmptyStmt(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + EmptyStmt, + {"Semicolon": "Pos", "Implicit": { kind: $.TypeKind.Basic, name: "boolean" }} + ); +} + +export type Expr = null | { + exprNode(): void +} & Node + +$.registerInterfaceType( + 'Expr', + null, // Zero value for interface is null + [{ name: "exprNode", args: [], returns: [] }] +); + +export class FieldList { + // position of opening parenthesis/brace/bracket, if any + public get Opening(): token.Pos { + return this._fields.Opening.value + } + public set Opening(value: token.Pos) { + this._fields.Opening.value = value + } + + // field list; or nil + public get List(): $.Slice { + return this._fields.List.value + } + public set List(value: $.Slice) { + this._fields.List.value = value + } + + // position of closing parenthesis/brace/bracket, if any + public get Closing(): token.Pos { + return this._fields.Closing.value + } + public set Closing(value: token.Pos) { + this._fields.Closing.value = value + } + + public _fields: { + Opening: $.VarRef; + List: $.VarRef<$.Slice>; + Closing: $.VarRef; + } + + constructor(init?: Partial<{Closing?: token.Pos, List?: $.Slice, Opening?: token.Pos}>) { + this._fields = { + Opening: $.varRef(init?.Opening ?? 0 as token.Pos), + List: $.varRef(init?.List ?? null), + Closing: $.varRef(init?.Closing ?? 0 as token.Pos) + } + } + + public clone(): FieldList { + const cloned = new FieldList() + cloned._fields = { + Opening: $.varRef(this._fields.Opening.value), + List: $.varRef(this._fields.List.value), + Closing: $.varRef(this._fields.Closing.value) + } + return cloned + } + + public Pos(): token.Pos { + const f = this + if (token.Pos_IsValid(f.Opening)) { + return f.Opening + } + if ($.len(f.List) > 0) { + return f.List![0]!.Pos() + } + return token.NoPos + } + + public End(): token.Pos { + const f = this + if (token.Pos_IsValid(f.Closing)) { + return f.Closing + 1 + } + { + let n = $.len(f.List) + if (n > 0) { + return f.List![n - 1]!.End() + } + } + return token.NoPos + } + + // NumFields returns the number of parameters or struct fields represented by a [FieldList]. 
+ public NumFields(): number { + const f = this + let n = 0 + if (f != null) { + for (let _i = 0; _i < $.len(f.List); _i++) { + const g = f.List![_i] + { + let m = $.len(g!.Names) + if (m == 0) { + m = 1 + } + n += m + } + } + } + return n + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'FieldList', + new FieldList(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "NumFields", args: [], returns: [{ type: { kind: $.TypeKind.Basic, name: "number" } }] }], + FieldList, + {"Opening": "Pos", "List": { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Pointer, elemType: "Field" } }, "Closing": "Pos"} + ); +} + +// A FuncDecl node represents a function declaration. +export class FuncDecl { + // associated documentation; or nil + public get Doc(): CommentGroup | null { + return this._fields.Doc.value + } + public set Doc(value: CommentGroup | null) { + this._fields.Doc.value = value + } + + // receiver (methods); or nil (functions) + public get Recv(): FieldList | null { + return this._fields.Recv.value + } + public set Recv(value: FieldList | null) { + this._fields.Recv.value = value + } + + // function/method name + public get Name(): Ident | null { + return this._fields.Name.value + } + public set Name(value: Ident | null) { + this._fields.Name.value = value + } + + // function signature: type and value parameters, results, and position of "func" keyword + public get Type(): FuncType | null { + return this._fields.Type.value + } + public set Type(value: FuncType | null) { + this._fields.Type.value = value + } + + // function body; or nil for external (non-Go) function + public get Body(): BlockStmt | null { + return this._fields.Body.value + } + public set Body(value: BlockStmt | null) { + this._fields.Body.value = value + } + + public _fields: { + Doc: $.VarRef; + Recv: $.VarRef; + Name: $.VarRef; + Type: $.VarRef; + Body: $.VarRef; + } + + constructor(init?: Partial<{Body?: BlockStmt | null, Doc?: CommentGroup | null, Name?: Ident | null, Recv?: FieldList | null, Type?: FuncType | null}>) { + this._fields = { + Doc: $.varRef(init?.Doc ?? null), + Recv: $.varRef(init?.Recv ?? null), + Name: $.varRef(init?.Name ?? null), + Type: $.varRef(init?.Type ?? null), + Body: $.varRef(init?.Body ?? null) + } + } + + public clone(): FuncDecl { + const cloned = new FuncDecl() + cloned._fields = { + Doc: $.varRef(this._fields.Doc.value ? $.markAsStructValue(this._fields.Doc.value.clone()) : null), + Recv: $.varRef(this._fields.Recv.value ? $.markAsStructValue(this._fields.Recv.value.clone()) : null), + Name: $.varRef(this._fields.Name.value ? $.markAsStructValue(this._fields.Name.value.clone()) : null), + Type: $.varRef(this._fields.Type.value ? $.markAsStructValue(this._fields.Type.value.clone()) : null), + Body: $.varRef(this._fields.Body.value ? 
$.markAsStructValue(this._fields.Body.value.clone()) : null) + } + return cloned + } + + public Pos(): token.Pos { + const d = this + return d.Type!.Pos() + } + + public End(): token.Pos { + const d = this + if (d.Body != null) { + return d.Body!.End() + } + return d.Type!.End() + } + + public declNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'FuncDecl', + new FuncDecl(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "declNode", args: [], returns: [] }], + FuncDecl, + {"Doc": { kind: $.TypeKind.Pointer, elemType: "CommentGroup" }, "Recv": { kind: $.TypeKind.Pointer, elemType: "FieldList" }, "Name": { kind: $.TypeKind.Pointer, elemType: "Ident" }, "Type": { kind: $.TypeKind.Pointer, elemType: "FuncType" }, "Body": { kind: $.TypeKind.Pointer, elemType: "BlockStmt" }} + ); +} + +// A FuncLit node represents a function literal. +export class FuncLit { + // function type + public get Type(): FuncType | null { + return this._fields.Type.value + } + public set Type(value: FuncType | null) { + this._fields.Type.value = value + } + + // function body + public get Body(): BlockStmt | null { + return this._fields.Body.value + } + public set Body(value: BlockStmt | null) { + this._fields.Body.value = value + } + + public _fields: { + Type: $.VarRef; + Body: $.VarRef; + } + + constructor(init?: Partial<{Body?: BlockStmt | null, Type?: FuncType | null}>) { + this._fields = { + Type: $.varRef(init?.Type ?? null), + Body: $.varRef(init?.Body ?? null) + } + } + + public clone(): FuncLit { + const cloned = new FuncLit() + cloned._fields = { + Type: $.varRef(this._fields.Type.value ? $.markAsStructValue(this._fields.Type.value.clone()) : null), + Body: $.varRef(this._fields.Body.value ? $.markAsStructValue(this._fields.Body.value.clone()) : null) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.Type!.Pos() + } + + public End(): token.Pos { + const x = this + return x.Body!.End() + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'FuncLit', + new FuncLit(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + FuncLit, + {"Type": { kind: $.TypeKind.Pointer, elemType: "FuncType" }, "Body": { kind: $.TypeKind.Pointer, elemType: "BlockStmt" }} + ); +} + +// A FuncType node represents a function type. 
+export class FuncType { + // position of "func" keyword (token.NoPos if there is no "func") + public get Func(): token.Pos { + return this._fields.Func.value + } + public set Func(value: token.Pos) { + this._fields.Func.value = value + } + + // type parameters; or nil + public get TypeParams(): FieldList | null { + return this._fields.TypeParams.value + } + public set TypeParams(value: FieldList | null) { + this._fields.TypeParams.value = value + } + + // (incoming) parameters; non-nil + public get Params(): FieldList | null { + return this._fields.Params.value + } + public set Params(value: FieldList | null) { + this._fields.Params.value = value + } + + // (outgoing) results; or nil + public get Results(): FieldList | null { + return this._fields.Results.value + } + public set Results(value: FieldList | null) { + this._fields.Results.value = value + } + + public _fields: { + Func: $.VarRef; + TypeParams: $.VarRef; + Params: $.VarRef; + Results: $.VarRef; + } + + constructor(init?: Partial<{Func?: token.Pos, Params?: FieldList | null, Results?: FieldList | null, TypeParams?: FieldList | null}>) { + this._fields = { + Func: $.varRef(init?.Func ?? 0 as token.Pos), + TypeParams: $.varRef(init?.TypeParams ?? null), + Params: $.varRef(init?.Params ?? null), + Results: $.varRef(init?.Results ?? null) + } + } + + public clone(): FuncType { + const cloned = new FuncType() + cloned._fields = { + Func: $.varRef(this._fields.Func.value), + TypeParams: $.varRef(this._fields.TypeParams.value ? $.markAsStructValue(this._fields.TypeParams.value.clone()) : null), + Params: $.varRef(this._fields.Params.value ? $.markAsStructValue(this._fields.Params.value.clone()) : null), + Results: $.varRef(this._fields.Results.value ? $.markAsStructValue(this._fields.Results.value.clone()) : null) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + if (token.Pos_IsValid(x.Func) || x.Params == null) { + // see issue 3870 + return x.Func + } + return x.Params!.Pos() + } + + public End(): token.Pos { + const x = this + if (x.Results != null) { + return x.Results!.End() + } + return x.Params!.End() + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'FuncType', + new FuncType(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + FuncType, + {"Func": "Pos", "TypeParams": { kind: $.TypeKind.Pointer, elemType: "FieldList" }, "Params": { kind: $.TypeKind.Pointer, elemType: "FieldList" }, "Results": { kind: $.TypeKind.Pointer, elemType: "FieldList" }} + ); +} + +// A GoStmt node represents a go statement. +export class GoStmt { + // position of "go" keyword + public get Go(): token.Pos { + return this._fields.Go.value + } + public set Go(value: token.Pos) { + this._fields.Go.value = value + } + + public get Call(): CallExpr | null { + return this._fields.Call.value + } + public set Call(value: CallExpr | null) { + this._fields.Call.value = value + } + + public _fields: { + Go: $.VarRef; + Call: $.VarRef; + } + + constructor(init?: Partial<{Call?: CallExpr | null, Go?: token.Pos}>) { + this._fields = { + Go: $.varRef(init?.Go ?? 0 as token.Pos), + Call: $.varRef(init?.Call ?? null) + } + } + + public clone(): GoStmt { + const cloned = new GoStmt() + cloned._fields = { + Go: $.varRef(this._fields.Go.value), + Call: $.varRef(this._fields.Call.value ? 
$.markAsStructValue(this._fields.Call.value.clone()) : null) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.Go + } + + public End(): token.Pos { + const s = this + return s.Call!.End() + } + + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'GoStmt', + new GoStmt(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + GoStmt, + {"Go": "Pos", "Call": { kind: $.TypeKind.Pointer, elemType: "CallExpr" }} + ); +} + +// An Ident node represents an identifier. +export class Ident { + // identifier position + public get NamePos(): token.Pos { + return this._fields.NamePos.value + } + public set NamePos(value: token.Pos) { + this._fields.NamePos.value = value + } + + // identifier name + public get Name(): string { + return this._fields.Name.value + } + public set Name(value: string) { + this._fields.Name.value = value + } + + // denoted object, or nil. Deprecated: see Object. + public get Obj(): Object | null { + return this._fields.Obj.value + } + public set Obj(value: Object | null) { + this._fields.Obj.value = value + } + + public _fields: { + NamePos: $.VarRef; + Name: $.VarRef; + Obj: $.VarRef; + } + + constructor(init?: Partial<{Name?: string, NamePos?: token.Pos, Obj?: Object | null}>) { + this._fields = { + NamePos: $.varRef(init?.NamePos ?? 0 as token.Pos), + Name: $.varRef(init?.Name ?? ""), + Obj: $.varRef(init?.Obj ?? null) + } + } + + public clone(): Ident { + const cloned = new Ident() + cloned._fields = { + NamePos: $.varRef(this._fields.NamePos.value), + Name: $.varRef(this._fields.Name.value), + Obj: $.varRef(this._fields.Obj.value ? $.markAsStructValue(this._fields.Obj.value.clone()) : null) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.NamePos + } + + public End(): token.Pos { + const x = this + return (x.NamePos + $.len(x.Name) as token.Pos) + } + + public exprNode(): void { + } + + // IsExported reports whether id starts with an upper-case letter. + public IsExported(): boolean { + const id = this + return token.IsExported(id.Name) + } + + public String(): string { + const id = this + if (id != null) { + return id.Name + } + return "" + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'Ident', + new Ident(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }, { name: "IsExported", args: [], returns: [{ type: { kind: $.TypeKind.Basic, name: "boolean" } }] }, { name: "String", args: [], returns: [{ type: { kind: $.TypeKind.Basic, name: "string" } }] }], + Ident, + {"NamePos": "Pos", "Name": { kind: $.TypeKind.Basic, name: "string" }, "Obj": { kind: $.TypeKind.Pointer, elemType: "Object" }} + ); +} + +// An ImportSpec node represents a single package import. 
+export class ImportSpec { + // associated documentation; or nil + public get Doc(): CommentGroup | null { + return this._fields.Doc.value + } + public set Doc(value: CommentGroup | null) { + this._fields.Doc.value = value + } + + // local package name (including "."); or nil + public get Name(): Ident | null { + return this._fields.Name.value + } + public set Name(value: Ident | null) { + this._fields.Name.value = value + } + + // import path + public get Path(): BasicLit | null { + return this._fields.Path.value + } + public set Path(value: BasicLit | null) { + this._fields.Path.value = value + } + + // line comments; or nil + public get Comment(): CommentGroup | null { + return this._fields.Comment.value + } + public set Comment(value: CommentGroup | null) { + this._fields.Comment.value = value + } + + // end of spec (overrides Path.Pos if nonzero) + public get EndPos(): token.Pos { + return this._fields.EndPos.value + } + public set EndPos(value: token.Pos) { + this._fields.EndPos.value = value + } + + public _fields: { + Doc: $.VarRef; + Name: $.VarRef; + Path: $.VarRef; + Comment: $.VarRef; + EndPos: $.VarRef; + } + + constructor(init?: Partial<{Comment?: CommentGroup | null, Doc?: CommentGroup | null, EndPos?: token.Pos, Name?: Ident | null, Path?: BasicLit | null}>) { + this._fields = { + Doc: $.varRef(init?.Doc ?? null), + Name: $.varRef(init?.Name ?? null), + Path: $.varRef(init?.Path ?? null), + Comment: $.varRef(init?.Comment ?? null), + EndPos: $.varRef(init?.EndPos ?? 0 as token.Pos) + } + } + + public clone(): ImportSpec { + const cloned = new ImportSpec() + cloned._fields = { + Doc: $.varRef(this._fields.Doc.value ? $.markAsStructValue(this._fields.Doc.value.clone()) : null), + Name: $.varRef(this._fields.Name.value ? $.markAsStructValue(this._fields.Name.value.clone()) : null), + Path: $.varRef(this._fields.Path.value ? $.markAsStructValue(this._fields.Path.value.clone()) : null), + Comment: $.varRef(this._fields.Comment.value ? $.markAsStructValue(this._fields.Comment.value.clone()) : null), + EndPos: $.varRef(this._fields.EndPos.value) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + if (s.Name != null) { + return s.Name!.Pos() + } + return s.Path!.Pos() + } + + public End(): token.Pos { + const s = this + if (s.EndPos != 0) { + return s.EndPos + } + return s.Path!.End() + } + + // specNode() ensures that only spec nodes can be + // assigned to a Spec. + public specNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'ImportSpec', + new ImportSpec(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "specNode", args: [], returns: [] }], + ImportSpec, + {"Doc": { kind: $.TypeKind.Pointer, elemType: "CommentGroup" }, "Name": { kind: $.TypeKind.Pointer, elemType: "Ident" }, "Path": { kind: $.TypeKind.Pointer, elemType: "BasicLit" }, "Comment": { kind: $.TypeKind.Pointer, elemType: "CommentGroup" }, "EndPos": "Pos"} + ); +} + +// An InterfaceType node represents an interface type. 
+export class InterfaceType { + // position of "interface" keyword + public get Interface(): token.Pos { + return this._fields.Interface.value + } + public set Interface(value: token.Pos) { + this._fields.Interface.value = value + } + + // list of embedded interfaces, methods, or types + public get Methods(): FieldList | null { + return this._fields.Methods.value + } + public set Methods(value: FieldList | null) { + this._fields.Methods.value = value + } + + // true if (source) methods or types are missing in the Methods list + public get Incomplete(): boolean { + return this._fields.Incomplete.value + } + public set Incomplete(value: boolean) { + this._fields.Incomplete.value = value + } + + public _fields: { + Interface: $.VarRef; + Methods: $.VarRef; + Incomplete: $.VarRef; + } + + constructor(init?: Partial<{Incomplete?: boolean, Interface?: token.Pos, Methods?: FieldList | null}>) { + this._fields = { + Interface: $.varRef(init?.Interface ?? 0 as token.Pos), + Methods: $.varRef(init?.Methods ?? null), + Incomplete: $.varRef(init?.Incomplete ?? false) + } + } + + public clone(): InterfaceType { + const cloned = new InterfaceType() + cloned._fields = { + Interface: $.varRef(this._fields.Interface.value), + Methods: $.varRef(this._fields.Methods.value ? $.markAsStructValue(this._fields.Methods.value.clone()) : null), + Incomplete: $.varRef(this._fields.Incomplete.value) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.Interface + } + + public End(): token.Pos { + const x = this + return x.Methods!.End() + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'InterfaceType', + new InterfaceType(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + InterfaceType, + {"Interface": "Pos", "Methods": { kind: $.TypeKind.Pointer, elemType: "FieldList" }, "Incomplete": { kind: $.TypeKind.Basic, name: "boolean" }} + ); +} + +export type Node = null | { + // position of first character immediately after the node + End(): token.Pos + // position of first character belonging to the node + Pos(): token.Pos +} + +$.registerInterfaceType( + 'Node', + null, // Zero value for interface is null + [{ name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "Pos", args: [], returns: [{ type: "Pos" }] }] +); + +export class Package { + // package name + public get Name(): string { + return this._fields.Name.value + } + public set Name(value: string) { + this._fields.Name.value = value + } + + // package scope across all files + public get Scope(): Scope | null { + return this._fields.Scope.value + } + public set Scope(value: Scope | null) { + this._fields.Scope.value = value + } + + // map of package id -> package object + public get Imports(): Map | null { + return this._fields.Imports.value + } + public set Imports(value: Map | null) { + this._fields.Imports.value = value + } + + // Go source files by filename + public get Files(): Map | null { + return this._fields.Files.value + } + public set Files(value: Map | null) { + this._fields.Files.value = value + } + + public _fields: { + Name: $.VarRef; + Scope: $.VarRef; + Imports: $.VarRef | null>; + Files: $.VarRef | null>; + } + + constructor(init?: Partial<{Files?: Map | null, Imports?: Map | null, Name?: string, Scope?: Scope | null}>) { + this._fields = { + Name: $.varRef(init?.Name ?? ""), + Scope: $.varRef(init?.Scope ?? 
null), + Imports: $.varRef(init?.Imports ?? null), + Files: $.varRef(init?.Files ?? null) + } + } + + public clone(): Package { + const cloned = new Package() + cloned._fields = { + Name: $.varRef(this._fields.Name.value), + Scope: $.varRef(this._fields.Scope.value ? $.markAsStructValue(this._fields.Scope.value.clone()) : null), + Imports: $.varRef(this._fields.Imports.value), + Files: $.varRef(this._fields.Files.value) + } + return cloned + } + + public Pos(): token.Pos { + return token.NoPos + } + + public End(): token.Pos { + return token.NoPos + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'Package', + new Package(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }], + Package, + {"Name": { kind: $.TypeKind.Basic, name: "string" }, "Scope": { kind: $.TypeKind.Pointer, elemType: "Scope" }, "Imports": { kind: $.TypeKind.Map, keyType: { kind: $.TypeKind.Basic, name: "string" }, elemType: { kind: $.TypeKind.Pointer, elemType: "Object" } }, "Files": { kind: $.TypeKind.Map, keyType: { kind: $.TypeKind.Basic, name: "string" }, elemType: { kind: $.TypeKind.Pointer, elemType: "File" } }} + ); +} + +// A SelectStmt node represents a select statement. +export class SelectStmt { + // position of "select" keyword + public get Select(): token.Pos { + return this._fields.Select.value + } + public set Select(value: token.Pos) { + this._fields.Select.value = value + } + + // CommClauses only + public get Body(): BlockStmt | null { + return this._fields.Body.value + } + public set Body(value: BlockStmt | null) { + this._fields.Body.value = value + } + + public _fields: { + Select: $.VarRef; + Body: $.VarRef; + } + + constructor(init?: Partial<{Body?: BlockStmt | null, Select?: token.Pos}>) { + this._fields = { + Select: $.varRef(init?.Select ?? 0 as token.Pos), + Body: $.varRef(init?.Body ?? null) + } + } + + public clone(): SelectStmt { + const cloned = new SelectStmt() + cloned._fields = { + Select: $.varRef(this._fields.Select.value), + Body: $.varRef(this._fields.Body.value ? $.markAsStructValue(this._fields.Body.value.clone()) : null) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.Select + } + + public End(): token.Pos { + const s = this + return s.Body!.End() + } + + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'SelectStmt', + new SelectStmt(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + SelectStmt, + {"Select": "Pos", "Body": { kind: $.TypeKind.Pointer, elemType: "BlockStmt" }} + ); +} + +// The Spec type stands for any of *ImportSpec, *ValueSpec, and *TypeSpec. +export type Spec = null | { + specNode(): void +} & Node + +$.registerInterfaceType( + 'Spec', + null, // Zero value for interface is null + [{ name: "specNode", args: [], returns: [] }] +); + +export type Stmt = null | { + stmtNode(): void +} & Node + +$.registerInterfaceType( + 'Stmt', + null, // Zero value for interface is null + [{ name: "stmtNode", args: [], returns: [] }] +); + +// A StructType node represents a struct type. 
+export class StructType { + // position of "struct" keyword + public get Struct(): token.Pos { + return this._fields.Struct.value + } + public set Struct(value: token.Pos) { + this._fields.Struct.value = value + } + + // list of field declarations + public get Fields(): FieldList | null { + return this._fields.Fields.value + } + public set Fields(value: FieldList | null) { + this._fields.Fields.value = value + } + + // true if (source) fields are missing in the Fields list + public get Incomplete(): boolean { + return this._fields.Incomplete.value + } + public set Incomplete(value: boolean) { + this._fields.Incomplete.value = value + } + + public _fields: { + Struct: $.VarRef; + Fields: $.VarRef; + Incomplete: $.VarRef; + } + + constructor(init?: Partial<{Fields?: FieldList | null, Incomplete?: boolean, Struct?: token.Pos}>) { + this._fields = { + Struct: $.varRef(init?.Struct ?? 0 as token.Pos), + Fields: $.varRef(init?.Fields ?? null), + Incomplete: $.varRef(init?.Incomplete ?? false) + } + } + + public clone(): StructType { + const cloned = new StructType() + cloned._fields = { + Struct: $.varRef(this._fields.Struct.value), + Fields: $.varRef(this._fields.Fields.value ? $.markAsStructValue(this._fields.Fields.value.clone()) : null), + Incomplete: $.varRef(this._fields.Incomplete.value) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.Struct + } + + public End(): token.Pos { + const x = this + return x.Fields!.End() + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'StructType', + new StructType(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + StructType, + {"Struct": "Pos", "Fields": { kind: $.TypeKind.Pointer, elemType: "FieldList" }, "Incomplete": { kind: $.TypeKind.Basic, name: "boolean" }} + ); +} + +// A DeclStmt node represents a declaration in a statement list. +export class DeclStmt { + // *GenDecl with CONST, TYPE, or VAR token + public get Decl(): Decl { + return this._fields.Decl.value + } + public set Decl(value: Decl) { + this._fields.Decl.value = value + } + + public _fields: { + Decl: $.VarRef; + } + + constructor(init?: Partial<{Decl?: Decl}>) { + this._fields = { + Decl: $.varRef(init?.Decl ?? 
null) + } + } + + public clone(): DeclStmt { + const cloned = new DeclStmt() + cloned._fields = { + Decl: $.varRef(this._fields.Decl.value) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.Decl!.Pos() + } + + public End(): token.Pos { + const s = this + return s.Decl!.End() + } + + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'DeclStmt', + new DeclStmt(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + DeclStmt, + {"Decl": "Decl"} + ); +} + +export class File { + // associated documentation; or nil + public get Doc(): CommentGroup | null { + return this._fields.Doc.value + } + public set Doc(value: CommentGroup | null) { + this._fields.Doc.value = value + } + + // position of "package" keyword + public get Package(): token.Pos { + return this._fields.Package.value + } + public set Package(value: token.Pos) { + this._fields.Package.value = value + } + + // package name + public get Name(): Ident | null { + return this._fields.Name.value + } + public set Name(value: Ident | null) { + this._fields.Name.value = value + } + + // top-level declarations; or nil + public get Decls(): $.Slice { + return this._fields.Decls.value + } + public set Decls(value: $.Slice) { + this._fields.Decls.value = value + } + + // start and end of entire file + public get FileStart(): token.Pos { + return this._fields.FileStart.value + } + public set FileStart(value: token.Pos) { + this._fields.FileStart.value = value + } + + // start and end of entire file + public get FileEnd(): token.Pos { + return this._fields.FileEnd.value + } + public set FileEnd(value: token.Pos) { + this._fields.FileEnd.value = value + } + + // package scope (this file only). Deprecated: see Object + public get Scope(): Scope | null { + return this._fields.Scope.value + } + public set Scope(value: Scope | null) { + this._fields.Scope.value = value + } + + // imports in this file + public get Imports(): $.Slice { + return this._fields.Imports.value + } + public set Imports(value: $.Slice) { + this._fields.Imports.value = value + } + + // unresolved identifiers in this file. Deprecated: see Object + public get Unresolved(): $.Slice { + return this._fields.Unresolved.value + } + public set Unresolved(value: $.Slice) { + this._fields.Unresolved.value = value + } + + // list of all comments in the source file + public get Comments(): $.Slice { + return this._fields.Comments.value + } + public set Comments(value: $.Slice) { + this._fields.Comments.value = value + } + + // minimum Go version required by //go:build or // +build directives + public get GoVersion(): string { + return this._fields.GoVersion.value + } + public set GoVersion(value: string) { + this._fields.GoVersion.value = value + } + + public _fields: { + Doc: $.VarRef; + Package: $.VarRef; + Name: $.VarRef; + Decls: $.VarRef<$.Slice>; + FileStart: $.VarRef; + FileEnd: $.VarRef; + Scope: $.VarRef; + Imports: $.VarRef<$.Slice>; + Unresolved: $.VarRef<$.Slice>; + Comments: $.VarRef<$.Slice>; + GoVersion: $.VarRef; + } + + constructor(init?: Partial<{Comments?: $.Slice, Decls?: $.Slice, Doc?: CommentGroup | null, FileEnd?: token.Pos, FileStart?: token.Pos, GoVersion?: string, Imports?: $.Slice, Name?: Ident | null, Package?: token.Pos, Scope?: Scope | null, Unresolved?: $.Slice}>) { + this._fields = { + Doc: $.varRef(init?.Doc ?? 
null), + Package: $.varRef(init?.Package ?? 0 as token.Pos), + Name: $.varRef(init?.Name ?? null), + Decls: $.varRef(init?.Decls ?? null), + FileStart: $.varRef(init?.FileStart ?? 0 as token.Pos), + FileEnd: $.varRef(init?.FileEnd ?? 0 as token.Pos), + Scope: $.varRef(init?.Scope ?? null), + Imports: $.varRef(init?.Imports ?? null), + Unresolved: $.varRef(init?.Unresolved ?? null), + Comments: $.varRef(init?.Comments ?? null), + GoVersion: $.varRef(init?.GoVersion ?? "") + } + } + + public clone(): File { + const cloned = new File() + cloned._fields = { + Doc: $.varRef(this._fields.Doc.value ? $.markAsStructValue(this._fields.Doc.value.clone()) : null), + Package: $.varRef(this._fields.Package.value), + Name: $.varRef(this._fields.Name.value ? $.markAsStructValue(this._fields.Name.value.clone()) : null), + Decls: $.varRef(this._fields.Decls.value), + FileStart: $.varRef(this._fields.FileStart.value), + FileEnd: $.varRef(this._fields.FileEnd.value), + Scope: $.varRef(this._fields.Scope.value ? $.markAsStructValue(this._fields.Scope.value.clone()) : null), + Imports: $.varRef(this._fields.Imports.value), + Unresolved: $.varRef(this._fields.Unresolved.value), + Comments: $.varRef(this._fields.Comments.value), + GoVersion: $.varRef(this._fields.GoVersion.value) + } + return cloned + } + + // Pos returns the position of the package declaration. + // It may be invalid, for example in an empty file. + // + // (Use FileStart for the start of the entire file. It is always valid.) + public Pos(): token.Pos { + const f = this + return f.Package + } + + // End returns the end of the last declaration in the file. + // It may be invalid, for example in an empty file. + // + // (Use FileEnd for the end of the entire file. It is always valid.) + public End(): token.Pos { + const f = this + { + let n = $.len(f.Decls) + if (n > 0) { + return f.Decls![n - 1]!.End() + } + } + return f.Name!.End() + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'File', + new File(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }], + File, + {"Doc": { kind: $.TypeKind.Pointer, elemType: "CommentGroup" }, "Package": "Pos", "Name": { kind: $.TypeKind.Pointer, elemType: "Ident" }, "Decls": { kind: $.TypeKind.Slice, elemType: "Decl" }, "FileStart": "Pos", "FileEnd": "Pos", "Scope": { kind: $.TypeKind.Pointer, elemType: "Scope" }, "Imports": { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Pointer, elemType: "ImportSpec" } }, "Unresolved": { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Pointer, elemType: "Ident" } }, "Comments": { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Pointer, elemType: "CommentGroup" } }, "GoVersion": { kind: $.TypeKind.Basic, name: "string" }} + ); +} + +// An ArrayType node represents an array or slice type. 
+export class ArrayType { + // position of "[" + public get Lbrack(): token.Pos { + return this._fields.Lbrack.value + } + public set Lbrack(value: token.Pos) { + this._fields.Lbrack.value = value + } + + // Ellipsis node for [...]T array types, nil for slice types + public get Len(): Expr { + return this._fields.Len.value + } + public set Len(value: Expr) { + this._fields.Len.value = value + } + + // element type + public get Elt(): Expr { + return this._fields.Elt.value + } + public set Elt(value: Expr) { + this._fields.Elt.value = value + } + + public _fields: { + Lbrack: $.VarRef; + Len: $.VarRef; + Elt: $.VarRef; + } + + constructor(init?: Partial<{Elt?: Expr, Lbrack?: token.Pos, Len?: Expr}>) { + this._fields = { + Lbrack: $.varRef(init?.Lbrack ?? 0 as token.Pos), + Len: $.varRef(init?.Len ?? null), + Elt: $.varRef(init?.Elt ?? null) + } + } + + public clone(): ArrayType { + const cloned = new ArrayType() + cloned._fields = { + Lbrack: $.varRef(this._fields.Lbrack.value), + Len: $.varRef(this._fields.Len.value), + Elt: $.varRef(this._fields.Elt.value) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.Lbrack + } + + public End(): token.Pos { + const x = this + return x.Elt!.End() + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'ArrayType', + new ArrayType(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + ArrayType, + {"Lbrack": "Pos", "Len": "Expr", "Elt": "Expr"} + ); +} + +// An AssignStmt node represents an assignment or +// a short variable declaration. +// +export class AssignStmt { + public get Lhs(): $.Slice { + return this._fields.Lhs.value + } + public set Lhs(value: $.Slice) { + this._fields.Lhs.value = value + } + + // position of Tok + public get TokPos(): token.Pos { + return this._fields.TokPos.value + } + public set TokPos(value: token.Pos) { + this._fields.TokPos.value = value + } + + // assignment token, DEFINE + public get Tok(): token.Token { + return this._fields.Tok.value + } + public set Tok(value: token.Token) { + this._fields.Tok.value = value + } + + public get Rhs(): $.Slice { + return this._fields.Rhs.value + } + public set Rhs(value: $.Slice) { + this._fields.Rhs.value = value + } + + public _fields: { + Lhs: $.VarRef<$.Slice>; + TokPos: $.VarRef; + Tok: $.VarRef; + Rhs: $.VarRef<$.Slice>; + } + + constructor(init?: Partial<{Lhs?: $.Slice, Rhs?: $.Slice, Tok?: token.Token, TokPos?: token.Pos}>) { + this._fields = { + Lhs: $.varRef(init?.Lhs ?? null), + TokPos: $.varRef(init?.TokPos ?? 0 as token.Pos), + Tok: $.varRef(init?.Tok ?? 0 as token.Token), + Rhs: $.varRef(init?.Rhs ?? 
null) + } + } + + public clone(): AssignStmt { + const cloned = new AssignStmt() + cloned._fields = { + Lhs: $.varRef(this._fields.Lhs.value), + TokPos: $.varRef(this._fields.TokPos.value), + Tok: $.varRef(this._fields.Tok.value), + Rhs: $.varRef(this._fields.Rhs.value) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.Lhs![0]!.Pos() + } + + public End(): token.Pos { + const s = this + return s.Rhs![$.len(s.Rhs) - 1]!.End() + } + + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'AssignStmt', + new AssignStmt(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + AssignStmt, + {"Lhs": { kind: $.TypeKind.Slice, elemType: "Expr" }, "TokPos": "Pos", "Tok": "Token", "Rhs": { kind: $.TypeKind.Slice, elemType: "Expr" }} + ); +} + +// A BinaryExpr node represents a binary expression. +export class BinaryExpr { + // left operand + public get X(): Expr { + return this._fields.X.value + } + public set X(value: Expr) { + this._fields.X.value = value + } + + // position of Op + public get OpPos(): token.Pos { + return this._fields.OpPos.value + } + public set OpPos(value: token.Pos) { + this._fields.OpPos.value = value + } + + // operator + public get Op(): token.Token { + return this._fields.Op.value + } + public set Op(value: token.Token) { + this._fields.Op.value = value + } + + // right operand + public get Y(): Expr { + return this._fields.Y.value + } + public set Y(value: Expr) { + this._fields.Y.value = value + } + + public _fields: { + X: $.VarRef; + OpPos: $.VarRef; + Op: $.VarRef; + Y: $.VarRef; + } + + constructor(init?: Partial<{Op?: token.Token, OpPos?: token.Pos, X?: Expr, Y?: Expr}>) { + this._fields = { + X: $.varRef(init?.X ?? null), + OpPos: $.varRef(init?.OpPos ?? 0 as token.Pos), + Op: $.varRef(init?.Op ?? 0 as token.Token), + Y: $.varRef(init?.Y ?? null) + } + } + + public clone(): BinaryExpr { + const cloned = new BinaryExpr() + cloned._fields = { + X: $.varRef(this._fields.X.value), + OpPos: $.varRef(this._fields.OpPos.value), + Op: $.varRef(this._fields.Op.value), + Y: $.varRef(this._fields.Y.value) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.X!.Pos() + } + + public End(): token.Pos { + const x = this + return x.Y!.End() + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'BinaryExpr', + new BinaryExpr(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + BinaryExpr, + {"X": "Expr", "OpPos": "Pos", "Op": "Token", "Y": "Expr"} + ); +} + +// A CallExpr node represents an expression followed by an argument list. +export class CallExpr { + // function expression + public get Fun(): Expr { + return this._fields.Fun.value + } + public set Fun(value: Expr) { + this._fields.Fun.value = value + } + + // position of "(" + public get Lparen(): token.Pos { + return this._fields.Lparen.value + } + public set Lparen(value: token.Pos) { + this._fields.Lparen.value = value + } + + // function arguments; or nil + public get Args(): $.Slice { + return this._fields.Args.value + } + public set Args(value: $.Slice) { + this._fields.Args.value = value + } + + // position of "..." 
(token.NoPos if there is no "...") + public get Ellipsis(): token.Pos { + return this._fields.Ellipsis.value + } + public set Ellipsis(value: token.Pos) { + this._fields.Ellipsis.value = value + } + + // position of ")" + public get Rparen(): token.Pos { + return this._fields.Rparen.value + } + public set Rparen(value: token.Pos) { + this._fields.Rparen.value = value + } + + public _fields: { + Fun: $.VarRef; + Lparen: $.VarRef; + Args: $.VarRef<$.Slice>; + Ellipsis: $.VarRef; + Rparen: $.VarRef; + } + + constructor(init?: Partial<{Args?: $.Slice, Ellipsis?: token.Pos, Fun?: Expr, Lparen?: token.Pos, Rparen?: token.Pos}>) { + this._fields = { + Fun: $.varRef(init?.Fun ?? null), + Lparen: $.varRef(init?.Lparen ?? 0 as token.Pos), + Args: $.varRef(init?.Args ?? null), + Ellipsis: $.varRef(init?.Ellipsis ?? 0 as token.Pos), + Rparen: $.varRef(init?.Rparen ?? 0 as token.Pos) + } + } + + public clone(): CallExpr { + const cloned = new CallExpr() + cloned._fields = { + Fun: $.varRef(this._fields.Fun.value), + Lparen: $.varRef(this._fields.Lparen.value), + Args: $.varRef(this._fields.Args.value), + Ellipsis: $.varRef(this._fields.Ellipsis.value), + Rparen: $.varRef(this._fields.Rparen.value) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.Fun!.Pos() + } + + public End(): token.Pos { + const x = this + return x.Rparen + 1 + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'CallExpr', + new CallExpr(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + CallExpr, + {"Fun": "Expr", "Lparen": "Pos", "Args": { kind: $.TypeKind.Slice, elemType: "Expr" }, "Ellipsis": "Pos", "Rparen": "Pos"} + ); +} + +// A ChanType node represents a channel type. +export class ChanType { + // position of "chan" keyword or "<-" (whichever comes first) + public get Begin(): token.Pos { + return this._fields.Begin.value + } + public set Begin(value: token.Pos) { + this._fields.Begin.value = value + } + + // position of "<-" (token.NoPos if there is no "<-") + public get Arrow(): token.Pos { + return this._fields.Arrow.value + } + public set Arrow(value: token.Pos) { + this._fields.Arrow.value = value + } + + // channel direction + public get Dir(): ChanDir { + return this._fields.Dir.value + } + public set Dir(value: ChanDir) { + this._fields.Dir.value = value + } + + // value type + public get Value(): Expr { + return this._fields.Value.value + } + public set Value(value: Expr) { + this._fields.Value.value = value + } + + public _fields: { + Begin: $.VarRef; + Arrow: $.VarRef; + Dir: $.VarRef; + Value: $.VarRef; + } + + constructor(init?: Partial<{Arrow?: token.Pos, Begin?: token.Pos, Dir?: ChanDir, Value?: Expr}>) { + this._fields = { + Begin: $.varRef(init?.Begin ?? 0 as token.Pos), + Arrow: $.varRef(init?.Arrow ?? 0 as token.Pos), + Dir: $.varRef(init?.Dir ?? new ChanDir(0)), + Value: $.varRef(init?.Value ?? 
null) + } + } + + public clone(): ChanType { + const cloned = new ChanType() + cloned._fields = { + Begin: $.varRef(this._fields.Begin.value), + Arrow: $.varRef(this._fields.Arrow.value), + Dir: $.varRef(this._fields.Dir.value), + Value: $.varRef(this._fields.Value.value) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.Begin + } + + public End(): token.Pos { + const x = this + return x.Value!.End() + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'ChanType', + new ChanType(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + ChanType, + {"Begin": "Pos", "Arrow": "Pos", "Dir": "ChanDir", "Value": "Expr"} + ); +} + +// A CompositeLit node represents a composite literal. +export class CompositeLit { + // literal type; or nil + public get Type(): Expr { + return this._fields.Type.value + } + public set Type(value: Expr) { + this._fields.Type.value = value + } + + // position of "{" + public get Lbrace(): token.Pos { + return this._fields.Lbrace.value + } + public set Lbrace(value: token.Pos) { + this._fields.Lbrace.value = value + } + + // list of composite elements; or nil + public get Elts(): $.Slice { + return this._fields.Elts.value + } + public set Elts(value: $.Slice) { + this._fields.Elts.value = value + } + + // position of "}" + public get Rbrace(): token.Pos { + return this._fields.Rbrace.value + } + public set Rbrace(value: token.Pos) { + this._fields.Rbrace.value = value + } + + // true if (source) expressions are missing in the Elts list + public get Incomplete(): boolean { + return this._fields.Incomplete.value + } + public set Incomplete(value: boolean) { + this._fields.Incomplete.value = value + } + + public _fields: { + Type: $.VarRef; + Lbrace: $.VarRef; + Elts: $.VarRef<$.Slice>; + Rbrace: $.VarRef; + Incomplete: $.VarRef; + } + + constructor(init?: Partial<{Elts?: $.Slice, Incomplete?: boolean, Lbrace?: token.Pos, Rbrace?: token.Pos, Type?: Expr}>) { + this._fields = { + Type: $.varRef(init?.Type ?? null), + Lbrace: $.varRef(init?.Lbrace ?? 0 as token.Pos), + Elts: $.varRef(init?.Elts ?? null), + Rbrace: $.varRef(init?.Rbrace ?? 0 as token.Pos), + Incomplete: $.varRef(init?.Incomplete ?? false) + } + } + + public clone(): CompositeLit { + const cloned = new CompositeLit() + cloned._fields = { + Type: $.varRef(this._fields.Type.value), + Lbrace: $.varRef(this._fields.Lbrace.value), + Elts: $.varRef(this._fields.Elts.value), + Rbrace: $.varRef(this._fields.Rbrace.value), + Incomplete: $.varRef(this._fields.Incomplete.value) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + if (x.Type != null) { + return x.Type!.Pos() + } + return x.Lbrace + } + + public End(): token.Pos { + const x = this + return x.Rbrace + 1 + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'CompositeLit', + new CompositeLit(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + CompositeLit, + {"Type": "Expr", "Lbrace": "Pos", "Elts": { kind: $.TypeKind.Slice, elemType: "Expr" }, "Rbrace": "Pos", "Incomplete": { kind: $.TypeKind.Basic, name: "boolean" }} + ); +} + +// An Ellipsis node stands for the "..." type in a +// parameter list or the "..." 
length in an array type. +// +export class Ellipsis { + // position of "..." + public get Ellipsis(): token.Pos { + return this._fields.Ellipsis.value + } + public set Ellipsis(value: token.Pos) { + this._fields.Ellipsis.value = value + } + + // ellipsis element type (parameter lists only); or nil + public get Elt(): Expr { + return this._fields.Elt.value + } + public set Elt(value: Expr) { + this._fields.Elt.value = value + } + + public _fields: { + Ellipsis: $.VarRef; + Elt: $.VarRef; + } + + constructor(init?: Partial<{Ellipsis?: token.Pos, Elt?: Expr}>) { + this._fields = { + Ellipsis: $.varRef(init?.Ellipsis ?? 0 as token.Pos), + Elt: $.varRef(init?.Elt ?? null) + } + } + + public clone(): Ellipsis { + const cloned = new Ellipsis() + cloned._fields = { + Ellipsis: $.varRef(this._fields.Ellipsis.value), + Elt: $.varRef(this._fields.Elt.value) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.Ellipsis + } + + public End(): token.Pos { + const x = this + if (x.Elt != null) { + return x.Elt!.End() + } + return x.Ellipsis + 3 + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'Ellipsis', + new Ellipsis(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + Ellipsis, + {"Ellipsis": "Pos", "Elt": "Expr"} + ); +} + +// An ExprStmt node represents a (stand-alone) expression +// in a statement list. +// +export class ExprStmt { + // expression + public get X(): Expr { + return this._fields.X.value + } + public set X(value: Expr) { + this._fields.X.value = value + } + + public _fields: { + X: $.VarRef; + } + + constructor(init?: Partial<{X?: Expr}>) { + this._fields = { + X: $.varRef(init?.X ?? 
null) + } + } + + public clone(): ExprStmt { + const cloned = new ExprStmt() + cloned._fields = { + X: $.varRef(this._fields.X.value) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.X!.Pos() + } + + public End(): token.Pos { + const s = this + return s.X!.End() + } + + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'ExprStmt', + new ExprStmt(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + ExprStmt, + {"X": "Expr"} + ); +} + +export class Field { + // associated documentation; or nil + public get Doc(): CommentGroup | null { + return this._fields.Doc.value + } + public set Doc(value: CommentGroup | null) { + this._fields.Doc.value = value + } + + // field/method/(type) parameter names; or nil + public get Names(): $.Slice { + return this._fields.Names.value + } + public set Names(value: $.Slice) { + this._fields.Names.value = value + } + + // field/method/parameter type; or nil + public get Type(): Expr { + return this._fields.Type.value + } + public set Type(value: Expr) { + this._fields.Type.value = value + } + + // field tag; or nil + public get Tag(): BasicLit | null { + return this._fields.Tag.value + } + public set Tag(value: BasicLit | null) { + this._fields.Tag.value = value + } + + // line comments; or nil + public get Comment(): CommentGroup | null { + return this._fields.Comment.value + } + public set Comment(value: CommentGroup | null) { + this._fields.Comment.value = value + } + + public _fields: { + Doc: $.VarRef; + Names: $.VarRef<$.Slice>; + Type: $.VarRef; + Tag: $.VarRef; + Comment: $.VarRef; + } + + constructor(init?: Partial<{Comment?: CommentGroup | null, Doc?: CommentGroup | null, Names?: $.Slice, Tag?: BasicLit | null, Type?: Expr}>) { + this._fields = { + Doc: $.varRef(init?.Doc ?? null), + Names: $.varRef(init?.Names ?? null), + Type: $.varRef(init?.Type ?? null), + Tag: $.varRef(init?.Tag ?? null), + Comment: $.varRef(init?.Comment ?? null) + } + } + + public clone(): Field { + const cloned = new Field() + cloned._fields = { + Doc: $.varRef(this._fields.Doc.value ? $.markAsStructValue(this._fields.Doc.value.clone()) : null), + Names: $.varRef(this._fields.Names.value), + Type: $.varRef(this._fields.Type.value), + Tag: $.varRef(this._fields.Tag.value ? $.markAsStructValue(this._fields.Tag.value.clone()) : null), + Comment: $.varRef(this._fields.Comment.value ? 
$.markAsStructValue(this._fields.Comment.value.clone()) : null) + } + return cloned + } + + public Pos(): token.Pos { + const f = this + if ($.len(f.Names) > 0) { + return f.Names![0]!.Pos() + } + if (f.Type != null) { + return f.Type!.Pos() + } + return token.NoPos + } + + public End(): token.Pos { + const f = this + if (f.Tag != null) { + return f.Tag!.End() + } + if (f.Type != null) { + return f.Type!.End() + } + if ($.len(f.Names) > 0) { + return f.Names![$.len(f.Names) - 1]!.End() + } + return token.NoPos + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'Field', + new Field(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }], + Field, + {"Doc": { kind: $.TypeKind.Pointer, elemType: "CommentGroup" }, "Names": { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Pointer, elemType: "Ident" } }, "Type": "Expr", "Tag": { kind: $.TypeKind.Pointer, elemType: "BasicLit" }, "Comment": { kind: $.TypeKind.Pointer, elemType: "CommentGroup" }} + ); +} + +// An IncDecStmt node represents an increment or decrement statement. +export class IncDecStmt { + public get X(): Expr { + return this._fields.X.value + } + public set X(value: Expr) { + this._fields.X.value = value + } + + // position of Tok + public get TokPos(): token.Pos { + return this._fields.TokPos.value + } + public set TokPos(value: token.Pos) { + this._fields.TokPos.value = value + } + + // INC or DEC + public get Tok(): token.Token { + return this._fields.Tok.value + } + public set Tok(value: token.Token) { + this._fields.Tok.value = value + } + + public _fields: { + X: $.VarRef; + TokPos: $.VarRef; + Tok: $.VarRef; + } + + constructor(init?: Partial<{Tok?: token.Token, TokPos?: token.Pos, X?: Expr}>) { + this._fields = { + X: $.varRef(init?.X ?? null), + TokPos: $.varRef(init?.TokPos ?? 0 as token.Pos), + Tok: $.varRef(init?.Tok ?? 0 as token.Token) + } + } + + public clone(): IncDecStmt { + const cloned = new IncDecStmt() + cloned._fields = { + X: $.varRef(this._fields.X.value), + TokPos: $.varRef(this._fields.TokPos.value), + Tok: $.varRef(this._fields.Tok.value) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.X!.Pos() + } + + public End(): token.Pos { + const s = this + return s.TokPos + 2 + } + + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'IncDecStmt', + new IncDecStmt(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + IncDecStmt, + {"X": "Expr", "TokPos": "Pos", "Tok": "Token"} + ); +} + +// An IndexExpr node represents an expression followed by an index. 
+export class IndexExpr { + // expression + public get X(): Expr { + return this._fields.X.value + } + public set X(value: Expr) { + this._fields.X.value = value + } + + // position of "[" + public get Lbrack(): token.Pos { + return this._fields.Lbrack.value + } + public set Lbrack(value: token.Pos) { + this._fields.Lbrack.value = value + } + + // index expression + public get Index(): Expr { + return this._fields.Index.value + } + public set Index(value: Expr) { + this._fields.Index.value = value + } + + // position of "]" + public get Rbrack(): token.Pos { + return this._fields.Rbrack.value + } + public set Rbrack(value: token.Pos) { + this._fields.Rbrack.value = value + } + + public _fields: { + X: $.VarRef; + Lbrack: $.VarRef; + Index: $.VarRef; + Rbrack: $.VarRef; + } + + constructor(init?: Partial<{Index?: Expr, Lbrack?: token.Pos, Rbrack?: token.Pos, X?: Expr}>) { + this._fields = { + X: $.varRef(init?.X ?? null), + Lbrack: $.varRef(init?.Lbrack ?? 0 as token.Pos), + Index: $.varRef(init?.Index ?? null), + Rbrack: $.varRef(init?.Rbrack ?? 0 as token.Pos) + } + } + + public clone(): IndexExpr { + const cloned = new IndexExpr() + cloned._fields = { + X: $.varRef(this._fields.X.value), + Lbrack: $.varRef(this._fields.Lbrack.value), + Index: $.varRef(this._fields.Index.value), + Rbrack: $.varRef(this._fields.Rbrack.value) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.X!.Pos() + } + + public End(): token.Pos { + const x = this + return x.Rbrack + 1 + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'IndexExpr', + new IndexExpr(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + IndexExpr, + {"X": "Expr", "Lbrack": "Pos", "Index": "Expr", "Rbrack": "Pos"} + ); +} + +// An IndexListExpr node represents an expression followed by multiple +// indices. +export class IndexListExpr { + // expression + public get X(): Expr { + return this._fields.X.value + } + public set X(value: Expr) { + this._fields.X.value = value + } + + // position of "[" + public get Lbrack(): token.Pos { + return this._fields.Lbrack.value + } + public set Lbrack(value: token.Pos) { + this._fields.Lbrack.value = value + } + + // index expressions + public get Indices(): $.Slice { + return this._fields.Indices.value + } + public set Indices(value: $.Slice) { + this._fields.Indices.value = value + } + + // position of "]" + public get Rbrack(): token.Pos { + return this._fields.Rbrack.value + } + public set Rbrack(value: token.Pos) { + this._fields.Rbrack.value = value + } + + public _fields: { + X: $.VarRef; + Lbrack: $.VarRef; + Indices: $.VarRef<$.Slice>; + Rbrack: $.VarRef; + } + + constructor(init?: Partial<{Indices?: $.Slice, Lbrack?: token.Pos, Rbrack?: token.Pos, X?: Expr}>) { + this._fields = { + X: $.varRef(init?.X ?? null), + Lbrack: $.varRef(init?.Lbrack ?? 0 as token.Pos), + Indices: $.varRef(init?.Indices ?? null), + Rbrack: $.varRef(init?.Rbrack ?? 
0 as token.Pos) + } + } + + public clone(): IndexListExpr { + const cloned = new IndexListExpr() + cloned._fields = { + X: $.varRef(this._fields.X.value), + Lbrack: $.varRef(this._fields.Lbrack.value), + Indices: $.varRef(this._fields.Indices.value), + Rbrack: $.varRef(this._fields.Rbrack.value) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.X!.Pos() + } + + public End(): token.Pos { + const x = this + return x.Rbrack + 1 + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'IndexListExpr', + new IndexListExpr(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + IndexListExpr, + {"X": "Expr", "Lbrack": "Pos", "Indices": { kind: $.TypeKind.Slice, elemType: "Expr" }, "Rbrack": "Pos"} + ); +} + +// A KeyValueExpr node represents (key : value) pairs +// in composite literals. +// +export class KeyValueExpr { + public get Key(): Expr { + return this._fields.Key.value + } + public set Key(value: Expr) { + this._fields.Key.value = value + } + + // position of ":" + public get Colon(): token.Pos { + return this._fields.Colon.value + } + public set Colon(value: token.Pos) { + this._fields.Colon.value = value + } + + public get Value(): Expr { + return this._fields.Value.value + } + public set Value(value: Expr) { + this._fields.Value.value = value + } + + public _fields: { + Key: $.VarRef; + Colon: $.VarRef; + Value: $.VarRef; + } + + constructor(init?: Partial<{Colon?: token.Pos, Key?: Expr, Value?: Expr}>) { + this._fields = { + Key: $.varRef(init?.Key ?? null), + Colon: $.varRef(init?.Colon ?? 0 as token.Pos), + Value: $.varRef(init?.Value ?? null) + } + } + + public clone(): KeyValueExpr { + const cloned = new KeyValueExpr() + cloned._fields = { + Key: $.varRef(this._fields.Key.value), + Colon: $.varRef(this._fields.Colon.value), + Value: $.varRef(this._fields.Value.value) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.Key!.Pos() + } + + public End(): token.Pos { + const x = this + return x.Value!.End() + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'KeyValueExpr', + new KeyValueExpr(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + KeyValueExpr, + {"Key": "Expr", "Colon": "Pos", "Value": "Expr"} + ); +} + +// A MapType node represents a map type. +export class MapType { + // position of "map" keyword + public get Map(): token.Pos { + return this._fields.Map.value + } + public set Map(value: token.Pos) { + this._fields.Map.value = value + } + + public get Key(): Expr { + return this._fields.Key.value + } + public set Key(value: Expr) { + this._fields.Key.value = value + } + + public get Value(): Expr { + return this._fields.Value.value + } + public set Value(value: Expr) { + this._fields.Value.value = value + } + + public _fields: { + Map: $.VarRef; + Key: $.VarRef; + Value: $.VarRef; + } + + constructor(init?: Partial<{Key?: Expr, Map?: token.Pos, Value?: Expr}>) { + this._fields = { + Map: $.varRef(init?.Map ?? 0 as token.Pos), + Key: $.varRef(init?.Key ?? null), + Value: $.varRef(init?.Value ?? 
null) + } + } + + public clone(): MapType { + const cloned = new MapType() + cloned._fields = { + Map: $.varRef(this._fields.Map.value), + Key: $.varRef(this._fields.Key.value), + Value: $.varRef(this._fields.Value.value) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.Map + } + + public End(): token.Pos { + const x = this + return x.Value!.End() + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'MapType', + new MapType(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + MapType, + {"Map": "Pos", "Key": "Expr", "Value": "Expr"} + ); +} + +// A ParenExpr node represents a parenthesized expression. +export class ParenExpr { + // position of "(" + public get Lparen(): token.Pos { + return this._fields.Lparen.value + } + public set Lparen(value: token.Pos) { + this._fields.Lparen.value = value + } + + // parenthesized expression + public get X(): Expr { + return this._fields.X.value + } + public set X(value: Expr) { + this._fields.X.value = value + } + + // position of ")" + public get Rparen(): token.Pos { + return this._fields.Rparen.value + } + public set Rparen(value: token.Pos) { + this._fields.Rparen.value = value + } + + public _fields: { + Lparen: $.VarRef; + X: $.VarRef; + Rparen: $.VarRef; + } + + constructor(init?: Partial<{Lparen?: token.Pos, Rparen?: token.Pos, X?: Expr}>) { + this._fields = { + Lparen: $.varRef(init?.Lparen ?? 0 as token.Pos), + X: $.varRef(init?.X ?? null), + Rparen: $.varRef(init?.Rparen ?? 0 as token.Pos) + } + } + + public clone(): ParenExpr { + const cloned = new ParenExpr() + cloned._fields = { + Lparen: $.varRef(this._fields.Lparen.value), + X: $.varRef(this._fields.X.value), + Rparen: $.varRef(this._fields.Rparen.value) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.Lparen + } + + public End(): token.Pos { + const x = this + return x.Rparen + 1 + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'ParenExpr', + new ParenExpr(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + ParenExpr, + {"Lparen": "Pos", "X": "Expr", "Rparen": "Pos"} + ); +} + +// A RangeStmt represents a for statement with a range clause. 
+export class RangeStmt { + // position of "for" keyword + public get For(): token.Pos { + return this._fields.For.value + } + public set For(value: token.Pos) { + this._fields.For.value = value + } + + // Key, Value may be nil + public get Key(): Expr { + return this._fields.Key.value + } + public set Key(value: Expr) { + this._fields.Key.value = value + } + + // Key, Value may be nil + public get Value(): Expr { + return this._fields.Value.value + } + public set Value(value: Expr) { + this._fields.Value.value = value + } + + // position of Tok; invalid if Key == nil + public get TokPos(): token.Pos { + return this._fields.TokPos.value + } + public set TokPos(value: token.Pos) { + this._fields.TokPos.value = value + } + + // ILLEGAL if Key == nil, ASSIGN, DEFINE + public get Tok(): token.Token { + return this._fields.Tok.value + } + public set Tok(value: token.Token) { + this._fields.Tok.value = value + } + + // position of "range" keyword + public get Range(): token.Pos { + return this._fields.Range.value + } + public set Range(value: token.Pos) { + this._fields.Range.value = value + } + + // value to range over + public get X(): Expr { + return this._fields.X.value + } + public set X(value: Expr) { + this._fields.X.value = value + } + + public get Body(): BlockStmt | null { + return this._fields.Body.value + } + public set Body(value: BlockStmt | null) { + this._fields.Body.value = value + } + + public _fields: { + For: $.VarRef; + Key: $.VarRef; + Value: $.VarRef; + TokPos: $.VarRef; + Tok: $.VarRef; + Range: $.VarRef; + X: $.VarRef; + Body: $.VarRef; + } + + constructor(init?: Partial<{Body?: BlockStmt | null, For?: token.Pos, Key?: Expr, Range?: token.Pos, Tok?: token.Token, TokPos?: token.Pos, Value?: Expr, X?: Expr}>) { + this._fields = { + For: $.varRef(init?.For ?? 0 as token.Pos), + Key: $.varRef(init?.Key ?? null), + Value: $.varRef(init?.Value ?? null), + TokPos: $.varRef(init?.TokPos ?? 0 as token.Pos), + Tok: $.varRef(init?.Tok ?? 0 as token.Token), + Range: $.varRef(init?.Range ?? 0 as token.Pos), + X: $.varRef(init?.X ?? null), + Body: $.varRef(init?.Body ?? null) + } + } + + public clone(): RangeStmt { + const cloned = new RangeStmt() + cloned._fields = { + For: $.varRef(this._fields.For.value), + Key: $.varRef(this._fields.Key.value), + Value: $.varRef(this._fields.Value.value), + TokPos: $.varRef(this._fields.TokPos.value), + Tok: $.varRef(this._fields.Tok.value), + Range: $.varRef(this._fields.Range.value), + X: $.varRef(this._fields.X.value), + Body: $.varRef(this._fields.Body.value ? $.markAsStructValue(this._fields.Body.value.clone()) : null) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.For + } + + public End(): token.Pos { + const s = this + return s.Body!.End() + } + + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'RangeStmt', + new RangeStmt(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + RangeStmt, + {"For": "Pos", "Key": "Expr", "Value": "Expr", "TokPos": "Pos", "Tok": "Token", "Range": "Pos", "X": "Expr", "Body": { kind: $.TypeKind.Pointer, elemType: "BlockStmt" }} + ); +} + +// A ReturnStmt node represents a return statement. 
+export class ReturnStmt { + // position of "return" keyword + public get Return(): token.Pos { + return this._fields.Return.value + } + public set Return(value: token.Pos) { + this._fields.Return.value = value + } + + // result expressions; or nil + public get Results(): $.Slice { + return this._fields.Results.value + } + public set Results(value: $.Slice) { + this._fields.Results.value = value + } + + public _fields: { + Return: $.VarRef; + Results: $.VarRef<$.Slice>; + } + + constructor(init?: Partial<{Results?: $.Slice, Return?: token.Pos}>) { + this._fields = { + Return: $.varRef(init?.Return ?? 0 as token.Pos), + Results: $.varRef(init?.Results ?? null) + } + } + + public clone(): ReturnStmt { + const cloned = new ReturnStmt() + cloned._fields = { + Return: $.varRef(this._fields.Return.value), + Results: $.varRef(this._fields.Results.value) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.Return + } + + public End(): token.Pos { + const s = this + { + let n = $.len(s.Results) + if (n > 0) { + return s.Results![n - 1]!.End() + } + } + return s.Return + 6 + } + + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'ReturnStmt', + new ReturnStmt(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + ReturnStmt, + {"Return": "Pos", "Results": { kind: $.TypeKind.Slice, elemType: "Expr" }} + ); +} + +// A SelectorExpr node represents an expression followed by a selector. +export class SelectorExpr { + // expression + public get X(): Expr { + return this._fields.X.value + } + public set X(value: Expr) { + this._fields.X.value = value + } + + // field selector + public get Sel(): Ident | null { + return this._fields.Sel.value + } + public set Sel(value: Ident | null) { + this._fields.Sel.value = value + } + + public _fields: { + X: $.VarRef; + Sel: $.VarRef; + } + + constructor(init?: Partial<{Sel?: Ident | null, X?: Expr}>) { + this._fields = { + X: $.varRef(init?.X ?? null), + Sel: $.varRef(init?.Sel ?? null) + } + } + + public clone(): SelectorExpr { + const cloned = new SelectorExpr() + cloned._fields = { + X: $.varRef(this._fields.X.value), + Sel: $.varRef(this._fields.Sel.value ? $.markAsStructValue(this._fields.Sel.value.clone()) : null) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.X!.Pos() + } + + public End(): token.Pos { + const x = this + return x.Sel!.End() + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'SelectorExpr', + new SelectorExpr(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + SelectorExpr, + {"X": "Expr", "Sel": { kind: $.TypeKind.Pointer, elemType: "Ident" }} + ); +} + +// A SendStmt node represents a send statement. 
+export class SendStmt { + public get Chan(): Expr { + return this._fields.Chan.value + } + public set Chan(value: Expr) { + this._fields.Chan.value = value + } + + // position of "<-" + public get Arrow(): token.Pos { + return this._fields.Arrow.value + } + public set Arrow(value: token.Pos) { + this._fields.Arrow.value = value + } + + public get Value(): Expr { + return this._fields.Value.value + } + public set Value(value: Expr) { + this._fields.Value.value = value + } + + public _fields: { + Chan: $.VarRef; + Arrow: $.VarRef; + Value: $.VarRef; + } + + constructor(init?: Partial<{Arrow?: token.Pos, Chan?: Expr, Value?: Expr}>) { + this._fields = { + Chan: $.varRef(init?.Chan ?? null), + Arrow: $.varRef(init?.Arrow ?? 0 as token.Pos), + Value: $.varRef(init?.Value ?? null) + } + } + + public clone(): SendStmt { + const cloned = new SendStmt() + cloned._fields = { + Chan: $.varRef(this._fields.Chan.value), + Arrow: $.varRef(this._fields.Arrow.value), + Value: $.varRef(this._fields.Value.value) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.Chan!.Pos() + } + + public End(): token.Pos { + const s = this + return s.Value!.End() + } + + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'SendStmt', + new SendStmt(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + SendStmt, + {"Chan": "Expr", "Arrow": "Pos", "Value": "Expr"} + ); +} + +// A SliceExpr node represents an expression followed by slice indices. +export class SliceExpr { + // expression + public get X(): Expr { + return this._fields.X.value + } + public set X(value: Expr) { + this._fields.X.value = value + } + + // position of "[" + public get Lbrack(): token.Pos { + return this._fields.Lbrack.value + } + public set Lbrack(value: token.Pos) { + this._fields.Lbrack.value = value + } + + // begin of slice range; or nil + public get Low(): Expr { + return this._fields.Low.value + } + public set Low(value: Expr) { + this._fields.Low.value = value + } + + // end of slice range; or nil + public get High(): Expr { + return this._fields.High.value + } + public set High(value: Expr) { + this._fields.High.value = value + } + + // maximum capacity of slice; or nil + public get Max(): Expr { + return this._fields.Max.value + } + public set Max(value: Expr) { + this._fields.Max.value = value + } + + // true if 3-index slice (2 colons present) + public get Slice3(): boolean { + return this._fields.Slice3.value + } + public set Slice3(value: boolean) { + this._fields.Slice3.value = value + } + + // position of "]" + public get Rbrack(): token.Pos { + return this._fields.Rbrack.value + } + public set Rbrack(value: token.Pos) { + this._fields.Rbrack.value = value + } + + public _fields: { + X: $.VarRef; + Lbrack: $.VarRef; + Low: $.VarRef; + High: $.VarRef; + Max: $.VarRef; + Slice3: $.VarRef; + Rbrack: $.VarRef; + } + + constructor(init?: Partial<{High?: Expr, Lbrack?: token.Pos, Low?: Expr, Max?: Expr, Rbrack?: token.Pos, Slice3?: boolean, X?: Expr}>) { + this._fields = { + X: $.varRef(init?.X ?? null), + Lbrack: $.varRef(init?.Lbrack ?? 0 as token.Pos), + Low: $.varRef(init?.Low ?? null), + High: $.varRef(init?.High ?? null), + Max: $.varRef(init?.Max ?? null), + Slice3: $.varRef(init?.Slice3 ?? false), + Rbrack: $.varRef(init?.Rbrack ?? 
0 as token.Pos) + } + } + + public clone(): SliceExpr { + const cloned = new SliceExpr() + cloned._fields = { + X: $.varRef(this._fields.X.value), + Lbrack: $.varRef(this._fields.Lbrack.value), + Low: $.varRef(this._fields.Low.value), + High: $.varRef(this._fields.High.value), + Max: $.varRef(this._fields.Max.value), + Slice3: $.varRef(this._fields.Slice3.value), + Rbrack: $.varRef(this._fields.Rbrack.value) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.X!.Pos() + } + + public End(): token.Pos { + const x = this + return x.Rbrack + 1 + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'SliceExpr', + new SliceExpr(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + SliceExpr, + {"X": "Expr", "Lbrack": "Pos", "Low": "Expr", "High": "Expr", "Max": "Expr", "Slice3": { kind: $.TypeKind.Basic, name: "boolean" }, "Rbrack": "Pos"} + ); +} + +// A StarExpr node represents an expression of the form "*" Expression. +// Semantically it could be a unary "*" expression, or a pointer type. +// +export class StarExpr { + // position of "*" + public get Star(): token.Pos { + return this._fields.Star.value + } + public set Star(value: token.Pos) { + this._fields.Star.value = value + } + + // operand + public get X(): Expr { + return this._fields.X.value + } + public set X(value: Expr) { + this._fields.X.value = value + } + + public _fields: { + Star: $.VarRef; + X: $.VarRef; + } + + constructor(init?: Partial<{Star?: token.Pos, X?: Expr}>) { + this._fields = { + Star: $.varRef(init?.Star ?? 0 as token.Pos), + X: $.varRef(init?.X ?? null) + } + } + + public clone(): StarExpr { + const cloned = new StarExpr() + cloned._fields = { + Star: $.varRef(this._fields.Star.value), + X: $.varRef(this._fields.X.value) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.Star + } + + public End(): token.Pos { + const x = this + return x.X!.End() + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'StarExpr', + new StarExpr(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + StarExpr, + {"Star": "Pos", "X": "Expr"} + ); +} + +// A TypeAssertExpr node represents an expression followed by a +// type assertion. +// +export class TypeAssertExpr { + // expression + public get X(): Expr { + return this._fields.X.value + } + public set X(value: Expr) { + this._fields.X.value = value + } + + // position of "(" + public get Lparen(): token.Pos { + return this._fields.Lparen.value + } + public set Lparen(value: token.Pos) { + this._fields.Lparen.value = value + } + + // asserted type; nil means type switch X.(type) + public get Type(): Expr { + return this._fields.Type.value + } + public set Type(value: Expr) { + this._fields.Type.value = value + } + + // position of ")" + public get Rparen(): token.Pos { + return this._fields.Rparen.value + } + public set Rparen(value: token.Pos) { + this._fields.Rparen.value = value + } + + public _fields: { + X: $.VarRef; + Lparen: $.VarRef; + Type: $.VarRef; + Rparen: $.VarRef; + } + + constructor(init?: Partial<{Lparen?: token.Pos, Rparen?: token.Pos, Type?: Expr, X?: Expr}>) { + this._fields = { + X: $.varRef(init?.X ?? 
null), + Lparen: $.varRef(init?.Lparen ?? 0 as token.Pos), + Type: $.varRef(init?.Type ?? null), + Rparen: $.varRef(init?.Rparen ?? 0 as token.Pos) + } + } + + public clone(): TypeAssertExpr { + const cloned = new TypeAssertExpr() + cloned._fields = { + X: $.varRef(this._fields.X.value), + Lparen: $.varRef(this._fields.Lparen.value), + Type: $.varRef(this._fields.Type.value), + Rparen: $.varRef(this._fields.Rparen.value) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.X!.Pos() + } + + public End(): token.Pos { + const x = this + return x.Rparen + 1 + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'TypeAssertExpr', + new TypeAssertExpr(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + TypeAssertExpr, + {"X": "Expr", "Lparen": "Pos", "Type": "Expr", "Rparen": "Pos"} + ); +} + +// A TypeSpec node represents a type declaration (TypeSpec production). +export class TypeSpec { + // associated documentation; or nil + public get Doc(): CommentGroup | null { + return this._fields.Doc.value + } + public set Doc(value: CommentGroup | null) { + this._fields.Doc.value = value + } + + // type name + public get Name(): Ident | null { + return this._fields.Name.value + } + public set Name(value: Ident | null) { + this._fields.Name.value = value + } + + // type parameters; or nil + public get TypeParams(): FieldList | null { + return this._fields.TypeParams.value + } + public set TypeParams(value: FieldList | null) { + this._fields.TypeParams.value = value + } + + // position of '=', if any + public get Assign(): token.Pos { + return this._fields.Assign.value + } + public set Assign(value: token.Pos) { + this._fields.Assign.value = value + } + + // *Ident, *ParenExpr, *SelectorExpr, *StarExpr, or any of the *XxxTypes + public get Type(): Expr { + return this._fields.Type.value + } + public set Type(value: Expr) { + this._fields.Type.value = value + } + + // line comments; or nil + public get Comment(): CommentGroup | null { + return this._fields.Comment.value + } + public set Comment(value: CommentGroup | null) { + this._fields.Comment.value = value + } + + public _fields: { + Doc: $.VarRef; + Name: $.VarRef; + TypeParams: $.VarRef; + Assign: $.VarRef; + Type: $.VarRef; + Comment: $.VarRef; + } + + constructor(init?: Partial<{Assign?: token.Pos, Comment?: CommentGroup | null, Doc?: CommentGroup | null, Name?: Ident | null, Type?: Expr, TypeParams?: FieldList | null}>) { + this._fields = { + Doc: $.varRef(init?.Doc ?? null), + Name: $.varRef(init?.Name ?? null), + TypeParams: $.varRef(init?.TypeParams ?? null), + Assign: $.varRef(init?.Assign ?? 0 as token.Pos), + Type: $.varRef(init?.Type ?? null), + Comment: $.varRef(init?.Comment ?? null) + } + } + + public clone(): TypeSpec { + const cloned = new TypeSpec() + cloned._fields = { + Doc: $.varRef(this._fields.Doc.value ? $.markAsStructValue(this._fields.Doc.value.clone()) : null), + Name: $.varRef(this._fields.Name.value ? $.markAsStructValue(this._fields.Name.value.clone()) : null), + TypeParams: $.varRef(this._fields.TypeParams.value ? $.markAsStructValue(this._fields.TypeParams.value.clone()) : null), + Assign: $.varRef(this._fields.Assign.value), + Type: $.varRef(this._fields.Type.value), + Comment: $.varRef(this._fields.Comment.value ? 
$.markAsStructValue(this._fields.Comment.value.clone()) : null) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.Name!.Pos() + } + + public End(): token.Pos { + const s = this + return s.Type!.End() + } + + public specNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'TypeSpec', + new TypeSpec(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "specNode", args: [], returns: [] }], + TypeSpec, + {"Doc": { kind: $.TypeKind.Pointer, elemType: "CommentGroup" }, "Name": { kind: $.TypeKind.Pointer, elemType: "Ident" }, "TypeParams": { kind: $.TypeKind.Pointer, elemType: "FieldList" }, "Assign": "Pos", "Type": "Expr", "Comment": { kind: $.TypeKind.Pointer, elemType: "CommentGroup" }} + ); +} + +// A UnaryExpr node represents a unary expression. +// Unary "*" expressions are represented via StarExpr nodes. +// +export class UnaryExpr { + // position of Op + public get OpPos(): token.Pos { + return this._fields.OpPos.value + } + public set OpPos(value: token.Pos) { + this._fields.OpPos.value = value + } + + // operator + public get Op(): token.Token { + return this._fields.Op.value + } + public set Op(value: token.Token) { + this._fields.Op.value = value + } + + // operand + public get X(): Expr { + return this._fields.X.value + } + public set X(value: Expr) { + this._fields.X.value = value + } + + public _fields: { + OpPos: $.VarRef; + Op: $.VarRef; + X: $.VarRef; + } + + constructor(init?: Partial<{Op?: token.Token, OpPos?: token.Pos, X?: Expr}>) { + this._fields = { + OpPos: $.varRef(init?.OpPos ?? 0 as token.Pos), + Op: $.varRef(init?.Op ?? 0 as token.Token), + X: $.varRef(init?.X ?? null) + } + } + + public clone(): UnaryExpr { + const cloned = new UnaryExpr() + cloned._fields = { + OpPos: $.varRef(this._fields.OpPos.value), + Op: $.varRef(this._fields.Op.value), + X: $.varRef(this._fields.X.value) + } + return cloned + } + + public Pos(): token.Pos { + const x = this + return x.OpPos + } + + public End(): token.Pos { + const x = this + return x.X!.End() + } + + public exprNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'UnaryExpr', + new UnaryExpr(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "exprNode", args: [], returns: [] }], + UnaryExpr, + {"OpPos": "Pos", "Op": "Token", "X": "Expr"} + ); +} + +// A ValueSpec node represents a constant or variable declaration +// (ConstSpec or VarSpec production). 
+// +export class ValueSpec { + // associated documentation; or nil + public get Doc(): CommentGroup | null { + return this._fields.Doc.value + } + public set Doc(value: CommentGroup | null) { + this._fields.Doc.value = value + } + + // value names (len(Names) > 0) + public get Names(): $.Slice { + return this._fields.Names.value + } + public set Names(value: $.Slice) { + this._fields.Names.value = value + } + + // value type; or nil + public get Type(): Expr { + return this._fields.Type.value + } + public set Type(value: Expr) { + this._fields.Type.value = value + } + + // initial values; or nil + public get Values(): $.Slice { + return this._fields.Values.value + } + public set Values(value: $.Slice) { + this._fields.Values.value = value + } + + // line comments; or nil + public get Comment(): CommentGroup | null { + return this._fields.Comment.value + } + public set Comment(value: CommentGroup | null) { + this._fields.Comment.value = value + } + + public _fields: { + Doc: $.VarRef; + Names: $.VarRef<$.Slice>; + Type: $.VarRef; + Values: $.VarRef<$.Slice>; + Comment: $.VarRef; + } + + constructor(init?: Partial<{Comment?: CommentGroup | null, Doc?: CommentGroup | null, Names?: $.Slice, Type?: Expr, Values?: $.Slice}>) { + this._fields = { + Doc: $.varRef(init?.Doc ?? null), + Names: $.varRef(init?.Names ?? null), + Type: $.varRef(init?.Type ?? null), + Values: $.varRef(init?.Values ?? null), + Comment: $.varRef(init?.Comment ?? null) + } + } + + public clone(): ValueSpec { + const cloned = new ValueSpec() + cloned._fields = { + Doc: $.varRef(this._fields.Doc.value ? $.markAsStructValue(this._fields.Doc.value.clone()) : null), + Names: $.varRef(this._fields.Names.value), + Type: $.varRef(this._fields.Type.value), + Values: $.varRef(this._fields.Values.value), + Comment: $.varRef(this._fields.Comment.value ? $.markAsStructValue(this._fields.Comment.value.clone()) : null) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.Names![0]!.Pos() + } + + public End(): token.Pos { + const s = this + { + let n = $.len(s.Values) + if (n > 0) { + return s.Values![n - 1]!.End() + } + } + if (s.Type != null) { + return s.Type!.End() + } + return s.Names![$.len(s.Names) - 1]!.End() + } + + public specNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'ValueSpec', + new ValueSpec(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "specNode", args: [], returns: [] }], + ValueSpec, + {"Doc": { kind: $.TypeKind.Pointer, elemType: "CommentGroup" }, "Names": { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Pointer, elemType: "Ident" } }, "Type": "Expr", "Values": { kind: $.TypeKind.Slice, elemType: "Expr" }, "Comment": { kind: $.TypeKind.Pointer, elemType: "CommentGroup" }} + ); +} + +// A GenDecl node (generic declaration node) represents an import, +// constant, type or variable declaration. A valid Lparen position +// (Lparen.IsValid()) indicates a parenthesized declaration. 
+// +// Relationship between Tok value and Specs element type: +// +// token.IMPORT *ImportSpec +// token.CONST *ValueSpec +// token.TYPE *TypeSpec +// token.VAR *ValueSpec +// +export class GenDecl { + // associated documentation; or nil + public get Doc(): CommentGroup | null { + return this._fields.Doc.value + } + public set Doc(value: CommentGroup | null) { + this._fields.Doc.value = value + } + + // position of Tok + public get TokPos(): token.Pos { + return this._fields.TokPos.value + } + public set TokPos(value: token.Pos) { + this._fields.TokPos.value = value + } + + // IMPORT, CONST, TYPE, or VAR + public get Tok(): token.Token { + return this._fields.Tok.value + } + public set Tok(value: token.Token) { + this._fields.Tok.value = value + } + + // position of '(', if any + public get Lparen(): token.Pos { + return this._fields.Lparen.value + } + public set Lparen(value: token.Pos) { + this._fields.Lparen.value = value + } + + public get Specs(): $.Slice { + return this._fields.Specs.value + } + public set Specs(value: $.Slice) { + this._fields.Specs.value = value + } + + // position of ')', if any + public get Rparen(): token.Pos { + return this._fields.Rparen.value + } + public set Rparen(value: token.Pos) { + this._fields.Rparen.value = value + } + + public _fields: { + Doc: $.VarRef; + TokPos: $.VarRef; + Tok: $.VarRef; + Lparen: $.VarRef; + Specs: $.VarRef<$.Slice>; + Rparen: $.VarRef; + } + + constructor(init?: Partial<{Doc?: CommentGroup | null, Lparen?: token.Pos, Rparen?: token.Pos, Specs?: $.Slice, Tok?: token.Token, TokPos?: token.Pos}>) { + this._fields = { + Doc: $.varRef(init?.Doc ?? null), + TokPos: $.varRef(init?.TokPos ?? 0 as token.Pos), + Tok: $.varRef(init?.Tok ?? 0 as token.Token), + Lparen: $.varRef(init?.Lparen ?? 0 as token.Pos), + Specs: $.varRef(init?.Specs ?? null), + Rparen: $.varRef(init?.Rparen ?? 0 as token.Pos) + } + } + + public clone(): GenDecl { + const cloned = new GenDecl() + cloned._fields = { + Doc: $.varRef(this._fields.Doc.value ? $.markAsStructValue(this._fields.Doc.value.clone()) : null), + TokPos: $.varRef(this._fields.TokPos.value), + Tok: $.varRef(this._fields.Tok.value), + Lparen: $.varRef(this._fields.Lparen.value), + Specs: $.varRef(this._fields.Specs.value), + Rparen: $.varRef(this._fields.Rparen.value) + } + return cloned + } + + public Pos(): token.Pos { + const d = this + return d.TokPos + } + + public End(): token.Pos { + const d = this + if (token.Pos_IsValid(d.Rparen)) { + return d.Rparen + 1 + } + return d.Specs![0]!.End() + } + + public declNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'GenDecl', + new GenDecl(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "declNode", args: [], returns: [] }], + GenDecl, + {"Doc": { kind: $.TypeKind.Pointer, elemType: "CommentGroup" }, "TokPos": "Pos", "Tok": "Token", "Lparen": "Pos", "Specs": { kind: $.TypeKind.Slice, elemType: "Spec" }, "Rparen": "Pos"} + ); +} + +// A BlockStmt node represents a braced statement list. 
+export class BlockStmt { + // position of "{" + public get Lbrace(): token.Pos { + return this._fields.Lbrace.value + } + public set Lbrace(value: token.Pos) { + this._fields.Lbrace.value = value + } + + public get List(): $.Slice { + return this._fields.List.value + } + public set List(value: $.Slice) { + this._fields.List.value = value + } + + // position of "}", if any (may be absent due to syntax error) + public get Rbrace(): token.Pos { + return this._fields.Rbrace.value + } + public set Rbrace(value: token.Pos) { + this._fields.Rbrace.value = value + } + + public _fields: { + Lbrace: $.VarRef; + List: $.VarRef<$.Slice>; + Rbrace: $.VarRef; + } + + constructor(init?: Partial<{Lbrace?: token.Pos, List?: $.Slice, Rbrace?: token.Pos}>) { + this._fields = { + Lbrace: $.varRef(init?.Lbrace ?? 0 as token.Pos), + List: $.varRef(init?.List ?? null), + Rbrace: $.varRef(init?.Rbrace ?? 0 as token.Pos) + } + } + + public clone(): BlockStmt { + const cloned = new BlockStmt() + cloned._fields = { + Lbrace: $.varRef(this._fields.Lbrace.value), + List: $.varRef(this._fields.List.value), + Rbrace: $.varRef(this._fields.Rbrace.value) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.Lbrace + } + + public End(): token.Pos { + const s = this + if (token.Pos_IsValid(s.Rbrace)) { + return s.Rbrace + 1 + } + { + let n = $.len(s.List) + if (n > 0) { + return s.List![n - 1]!.End() + } + } + return s.Lbrace + 1 + } + + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'BlockStmt', + new BlockStmt(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + BlockStmt, + {"Lbrace": "Pos", "List": { kind: $.TypeKind.Slice, elemType: "Stmt" }, "Rbrace": "Pos"} + ); +} + +// A CaseClause represents a case of an expression or type switch statement. +export class CaseClause { + // position of "case" or "default" keyword + public get Case(): token.Pos { + return this._fields.Case.value + } + public set Case(value: token.Pos) { + this._fields.Case.value = value + } + + // list of expressions or types; nil means default case + public get List(): $.Slice { + return this._fields.List.value + } + public set List(value: $.Slice) { + this._fields.List.value = value + } + + // position of ":" + public get Colon(): token.Pos { + return this._fields.Colon.value + } + public set Colon(value: token.Pos) { + this._fields.Colon.value = value + } + + // statement list; or nil + public get Body(): $.Slice { + return this._fields.Body.value + } + public set Body(value: $.Slice) { + this._fields.Body.value = value + } + + public _fields: { + Case: $.VarRef; + List: $.VarRef<$.Slice>; + Colon: $.VarRef; + Body: $.VarRef<$.Slice>; + } + + constructor(init?: Partial<{Body?: $.Slice, Case?: token.Pos, Colon?: token.Pos, List?: $.Slice}>) { + this._fields = { + Case: $.varRef(init?.Case ?? 0 as token.Pos), + List: $.varRef(init?.List ?? null), + Colon: $.varRef(init?.Colon ?? 0 as token.Pos), + Body: $.varRef(init?.Body ?? 
null) + } + } + + public clone(): CaseClause { + const cloned = new CaseClause() + cloned._fields = { + Case: $.varRef(this._fields.Case.value), + List: $.varRef(this._fields.List.value), + Colon: $.varRef(this._fields.Colon.value), + Body: $.varRef(this._fields.Body.value) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.Case + } + + public End(): token.Pos { + const s = this + { + let n = $.len(s.Body) + if (n > 0) { + return s.Body![n - 1]!.End() + } + } + return s.Colon + 1 + } + + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'CaseClause', + new CaseClause(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + CaseClause, + {"Case": "Pos", "List": { kind: $.TypeKind.Slice, elemType: "Expr" }, "Colon": "Pos", "Body": { kind: $.TypeKind.Slice, elemType: "Stmt" }} + ); +} + +// A CommClause node represents a case of a select statement. +export class CommClause { + // position of "case" or "default" keyword + public get Case(): token.Pos { + return this._fields.Case.value + } + public set Case(value: token.Pos) { + this._fields.Case.value = value + } + + // send or receive statement; nil means default case + public get Comm(): Stmt { + return this._fields.Comm.value + } + public set Comm(value: Stmt) { + this._fields.Comm.value = value + } + + // position of ":" + public get Colon(): token.Pos { + return this._fields.Colon.value + } + public set Colon(value: token.Pos) { + this._fields.Colon.value = value + } + + // statement list; or nil + public get Body(): $.Slice { + return this._fields.Body.value + } + public set Body(value: $.Slice) { + this._fields.Body.value = value + } + + public _fields: { + Case: $.VarRef; + Comm: $.VarRef; + Colon: $.VarRef; + Body: $.VarRef<$.Slice>; + } + + constructor(init?: Partial<{Body?: $.Slice, Case?: token.Pos, Colon?: token.Pos, Comm?: Stmt}>) { + this._fields = { + Case: $.varRef(init?.Case ?? 0 as token.Pos), + Comm: $.varRef(init?.Comm ?? null), + Colon: $.varRef(init?.Colon ?? 0 as token.Pos), + Body: $.varRef(init?.Body ?? null) + } + } + + public clone(): CommClause { + const cloned = new CommClause() + cloned._fields = { + Case: $.varRef(this._fields.Case.value), + Comm: $.varRef(this._fields.Comm.value), + Colon: $.varRef(this._fields.Colon.value), + Body: $.varRef(this._fields.Body.value) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.Case + } + + public End(): token.Pos { + const s = this + { + let n = $.len(s.Body) + if (n > 0) { + return s.Body![n - 1]!.End() + } + } + return s.Colon + 1 + } + + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'CommClause', + new CommClause(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + CommClause, + {"Case": "Pos", "Comm": "Stmt", "Colon": "Pos", "Body": { kind: $.TypeKind.Slice, elemType: "Stmt" }} + ); +} + +// A ForStmt represents a for statement. 
+export class ForStmt { + // position of "for" keyword + public get For(): token.Pos { + return this._fields.For.value + } + public set For(value: token.Pos) { + this._fields.For.value = value + } + + // initialization statement; or nil + public get Init(): Stmt { + return this._fields.Init.value + } + public set Init(value: Stmt) { + this._fields.Init.value = value + } + + // condition; or nil + public get Cond(): Expr { + return this._fields.Cond.value + } + public set Cond(value: Expr) { + this._fields.Cond.value = value + } + + // post iteration statement; or nil + public get Post(): Stmt { + return this._fields.Post.value + } + public set Post(value: Stmt) { + this._fields.Post.value = value + } + + public get Body(): BlockStmt | null { + return this._fields.Body.value + } + public set Body(value: BlockStmt | null) { + this._fields.Body.value = value + } + + public _fields: { + For: $.VarRef; + Init: $.VarRef; + Cond: $.VarRef; + Post: $.VarRef; + Body: $.VarRef; + } + + constructor(init?: Partial<{Body?: BlockStmt | null, Cond?: Expr, For?: token.Pos, Init?: Stmt, Post?: Stmt}>) { + this._fields = { + For: $.varRef(init?.For ?? 0 as token.Pos), + Init: $.varRef(init?.Init ?? null), + Cond: $.varRef(init?.Cond ?? null), + Post: $.varRef(init?.Post ?? null), + Body: $.varRef(init?.Body ?? null) + } + } + + public clone(): ForStmt { + const cloned = new ForStmt() + cloned._fields = { + For: $.varRef(this._fields.For.value), + Init: $.varRef(this._fields.Init.value), + Cond: $.varRef(this._fields.Cond.value), + Post: $.varRef(this._fields.Post.value), + Body: $.varRef(this._fields.Body.value ? $.markAsStructValue(this._fields.Body.value.clone()) : null) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.For + } + + public End(): token.Pos { + const s = this + return s.Body!.End() + } + + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'ForStmt', + new ForStmt(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + ForStmt, + {"For": "Pos", "Init": "Stmt", "Cond": "Expr", "Post": "Stmt", "Body": { kind: $.TypeKind.Pointer, elemType: "BlockStmt" }} + ); +} + +// An IfStmt node represents an if statement. +export class IfStmt { + // position of "if" keyword + public get If(): token.Pos { + return this._fields.If.value + } + public set If(value: token.Pos) { + this._fields.If.value = value + } + + // initialization statement; or nil + public get Init(): Stmt { + return this._fields.Init.value + } + public set Init(value: Stmt) { + this._fields.Init.value = value + } + + // condition + public get Cond(): Expr { + return this._fields.Cond.value + } + public set Cond(value: Expr) { + this._fields.Cond.value = value + } + + public get Body(): BlockStmt | null { + return this._fields.Body.value + } + public set Body(value: BlockStmt | null) { + this._fields.Body.value = value + } + + // else branch; or nil + public get Else(): Stmt { + return this._fields.Else.value + } + public set Else(value: Stmt) { + this._fields.Else.value = value + } + + public _fields: { + If: $.VarRef; + Init: $.VarRef; + Cond: $.VarRef; + Body: $.VarRef; + Else: $.VarRef; + } + + constructor(init?: Partial<{Body?: BlockStmt | null, Cond?: Expr, Else?: Stmt, If?: token.Pos, Init?: Stmt}>) { + this._fields = { + If: $.varRef(init?.If ?? 0 as token.Pos), + Init: $.varRef(init?.Init ?? 
null), + Cond: $.varRef(init?.Cond ?? null), + Body: $.varRef(init?.Body ?? null), + Else: $.varRef(init?.Else ?? null) + } + } + + public clone(): IfStmt { + const cloned = new IfStmt() + cloned._fields = { + If: $.varRef(this._fields.If.value), + Init: $.varRef(this._fields.Init.value), + Cond: $.varRef(this._fields.Cond.value), + Body: $.varRef(this._fields.Body.value ? $.markAsStructValue(this._fields.Body.value.clone()) : null), + Else: $.varRef(this._fields.Else.value) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.If + } + + public End(): token.Pos { + const s = this + if (s.Else != null) { + return s.Else!.End() + } + return s.Body!.End() + } + + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'IfStmt', + new IfStmt(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + IfStmt, + {"If": "Pos", "Init": "Stmt", "Cond": "Expr", "Body": { kind: $.TypeKind.Pointer, elemType: "BlockStmt" }, "Else": "Stmt"} + ); +} + +// A LabeledStmt node represents a labeled statement. +export class LabeledStmt { + public get Label(): Ident | null { + return this._fields.Label.value + } + public set Label(value: Ident | null) { + this._fields.Label.value = value + } + + // position of ":" + public get Colon(): token.Pos { + return this._fields.Colon.value + } + public set Colon(value: token.Pos) { + this._fields.Colon.value = value + } + + public get Stmt(): Stmt { + return this._fields.Stmt.value + } + public set Stmt(value: Stmt) { + this._fields.Stmt.value = value + } + + public _fields: { + Label: $.VarRef; + Colon: $.VarRef; + Stmt: $.VarRef; + } + + constructor(init?: Partial<{Colon?: token.Pos, Label?: Ident | null, Stmt?: Stmt}>) { + this._fields = { + Label: $.varRef(init?.Label ?? null), + Colon: $.varRef(init?.Colon ?? 0 as token.Pos), + Stmt: $.varRef(init?.Stmt ?? null) + } + } + + public clone(): LabeledStmt { + const cloned = new LabeledStmt() + cloned._fields = { + Label: $.varRef(this._fields.Label.value ? $.markAsStructValue(this._fields.Label.value.clone()) : null), + Colon: $.varRef(this._fields.Colon.value), + Stmt: $.varRef(this._fields.Stmt.value) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.Label!.Pos() + } + + public End(): token.Pos { + const s = this + return s.Stmt!.End() + } + + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'LabeledStmt', + new LabeledStmt(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + LabeledStmt, + {"Label": { kind: $.TypeKind.Pointer, elemType: "Ident" }, "Colon": "Pos", "Stmt": "Stmt"} + ); +} + +// A SwitchStmt node represents an expression switch statement. 
+export class SwitchStmt { + // position of "switch" keyword + public get Switch(): token.Pos { + return this._fields.Switch.value + } + public set Switch(value: token.Pos) { + this._fields.Switch.value = value + } + + // initialization statement; or nil + public get Init(): Stmt { + return this._fields.Init.value + } + public set Init(value: Stmt) { + this._fields.Init.value = value + } + + // tag expression; or nil + public get Tag(): Expr { + return this._fields.Tag.value + } + public set Tag(value: Expr) { + this._fields.Tag.value = value + } + + // CaseClauses only + public get Body(): BlockStmt | null { + return this._fields.Body.value + } + public set Body(value: BlockStmt | null) { + this._fields.Body.value = value + } + + public _fields: { + Switch: $.VarRef; + Init: $.VarRef; + Tag: $.VarRef; + Body: $.VarRef; + } + + constructor(init?: Partial<{Body?: BlockStmt | null, Init?: Stmt, Switch?: token.Pos, Tag?: Expr}>) { + this._fields = { + Switch: $.varRef(init?.Switch ?? 0 as token.Pos), + Init: $.varRef(init?.Init ?? null), + Tag: $.varRef(init?.Tag ?? null), + Body: $.varRef(init?.Body ?? null) + } + } + + public clone(): SwitchStmt { + const cloned = new SwitchStmt() + cloned._fields = { + Switch: $.varRef(this._fields.Switch.value), + Init: $.varRef(this._fields.Init.value), + Tag: $.varRef(this._fields.Tag.value), + Body: $.varRef(this._fields.Body.value ? $.markAsStructValue(this._fields.Body.value.clone()) : null) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.Switch + } + + public End(): token.Pos { + const s = this + return s.Body!.End() + } + + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'SwitchStmt', + new SwitchStmt(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + SwitchStmt, + {"Switch": "Pos", "Init": "Stmt", "Tag": "Expr", "Body": { kind: $.TypeKind.Pointer, elemType: "BlockStmt" }} + ); +} + +// A TypeSwitchStmt node represents a type switch statement. +export class TypeSwitchStmt { + // position of "switch" keyword + public get Switch(): token.Pos { + return this._fields.Switch.value + } + public set Switch(value: token.Pos) { + this._fields.Switch.value = value + } + + // initialization statement; or nil + public get Init(): Stmt { + return this._fields.Init.value + } + public set Init(value: Stmt) { + this._fields.Init.value = value + } + + // x := y.(type) or y.(type) + public get Assign(): Stmt { + return this._fields.Assign.value + } + public set Assign(value: Stmt) { + this._fields.Assign.value = value + } + + // CaseClauses only + public get Body(): BlockStmt | null { + return this._fields.Body.value + } + public set Body(value: BlockStmt | null) { + this._fields.Body.value = value + } + + public _fields: { + Switch: $.VarRef; + Init: $.VarRef; + Assign: $.VarRef; + Body: $.VarRef; + } + + constructor(init?: Partial<{Assign?: Stmt, Body?: BlockStmt | null, Init?: Stmt, Switch?: token.Pos}>) { + this._fields = { + Switch: $.varRef(init?.Switch ?? 0 as token.Pos), + Init: $.varRef(init?.Init ?? null), + Assign: $.varRef(init?.Assign ?? null), + Body: $.varRef(init?.Body ?? 
null) + } + } + + public clone(): TypeSwitchStmt { + const cloned = new TypeSwitchStmt() + cloned._fields = { + Switch: $.varRef(this._fields.Switch.value), + Init: $.varRef(this._fields.Init.value), + Assign: $.varRef(this._fields.Assign.value), + Body: $.varRef(this._fields.Body.value ? $.markAsStructValue(this._fields.Body.value.clone()) : null) + } + return cloned + } + + public Pos(): token.Pos { + const s = this + return s.Switch + } + + public End(): token.Pos { + const s = this + return s.Body!.End() + } + + public stmtNode(): void { + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'TypeSwitchStmt', + new TypeSwitchStmt(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }, { name: "End", args: [], returns: [{ type: "Pos" }] }, { name: "stmtNode", args: [], returns: [] }], + TypeSwitchStmt, + {"Switch": "Pos", "Init": "Stmt", "Assign": "Stmt", "Body": { kind: $.TypeKind.Pointer, elemType: "BlockStmt" }} + ); +} + +export function isWhitespace(ch: number): boolean { + return ch == 32 || ch == 9 || ch == 10 || ch == 13 +} + +export function stripTrailingWhitespace(s: string): string { + let i = $.len(s) + for (; i > 0 && isWhitespace($.indexString(s, i - 1)); ) { + i-- + } + return $.sliceString(s, 0, i) +} + +// isDirective reports whether c is a comment directive. +// This code is also in go/printer. +export function isDirective(c: string): boolean { + // "//line " is a line directive. + // "//extern " is for gccgo. + // "//export " is for cgo. + // (The // has been removed.) + if (strings.HasPrefix(c, "line ") || strings.HasPrefix(c, "extern ") || strings.HasPrefix(c, "export ")) { + return true + } + + // "//[a-z0-9]+:[a-z0-9]" + // (The // has been removed.) + let colon = strings.Index(c, ":") + if (colon <= 0 || colon + 1 >= $.len(c)) { + return false + } + for (let i = 0; i <= colon + 1; i++) { + if (i == colon) { + continue + } + let b = $.indexString(c, i) + if (!(97 <= b && b <= 122 || 48 <= b && b <= 57)) { + return false + } + } + return true +} + +// NewIdent creates a new [Ident] without position. +// Useful for ASTs generated by code other than the Go parser. +export function NewIdent(name: string): Ident | null { + return new Ident({}) +} + +// IsExported reports whether name starts with an upper-case letter. +export function IsExported(name: string): boolean { + return token.IsExported(name) +} + +// IsGenerated reports whether the file was generated by a program, +// not handwritten, by detecting the special comment described +// at https://go.dev/s/generatedcode. +// +// The syntax tree must have been parsed with the [parser.ParseComments] flag. +// Example: +// +// f, err := parser.ParseFile(fset, filename, src, parser.ParseComments|parser.PackageClauseOnly) +// if err != nil { ... } +// gen := ast.IsGenerated(f) +export function IsGenerated(file: File | null): boolean { + let [, ok] = generator(file) + return ok +} + +export function generator(file: File | null): [string, boolean] { + + // after package declaration + + // opt: check Contains first to avoid unnecessary array allocation in Split. + for (let _i = 0; _i < $.len(file!.Comments); _i++) { + const group = file!.Comments![_i] + { + + // after package declaration + + // opt: check Contains first to avoid unnecessary array allocation in Split. 
+ for (let _i = 0; _i < $.len(group!.List); _i++) { + const comment = group!.List![_i] + { + + // after package declaration + if (comment!.Pos() > file!.Package) { + break + } + // opt: check Contains first to avoid unnecessary array allocation in Split. + let prefix: string = "// Code generated " + if (strings.Contains(comment!.Text, "// Code generated ")) { + for (let _i = 0; _i < $.len(strings.Split(comment!.Text, "\n")); _i++) { + const line = strings.Split(comment!.Text, "\n")![_i] + { + { + let [rest, ok] = strings.CutPrefix(line, "// Code generated ") + if (ok) { + { + let [gen, ok] = strings.CutSuffix(rest, " DO NOT EDIT.") + if (ok) { + return [gen, true] + } + } + } + } + } + } + } + } + } + } + } + return ["", false] +} + +// Unparen returns the expression with any enclosing parentheses removed. +export function Unparen(e: Expr): Expr { + for (; ; ) { + let { value: paren, ok: ok } = $.typeAssert(e, {kind: $.TypeKind.Pointer, elemType: 'ParenExpr'}) + if (!ok) { + return e + } + e = paren!.X + } +} + diff --git a/compliance/deps/go/ast/commentmap.gs.ts b/compliance/deps/go/ast/commentmap.gs.ts new file mode 100644 index 00000000..c1eb87c6 --- /dev/null +++ b/compliance/deps/go/ast/commentmap.gs.ts @@ -0,0 +1,507 @@ +import * as $ from "@goscript/builtin/index.js" +import { Inspect } from "./walk.gs.js"; +import { Comment, CommentGroup, Decl, Field, File, Ident, Node, Spec, Stmt } from "./ast.gs.js"; + +import * as bytes from "@goscript/bytes/index.js" + +import * as cmp from "@goscript/cmp/index.js" + +import * as fmt from "@goscript/fmt/index.js" + +import * as token from "@goscript/go/token/index.js" + +import * as slices from "@goscript/slices/index.js" + +import * as strings from "@goscript/strings/index.js" + +export type CommentMap = Map> | null; + +export function CommentMap_addComment(cmap: CommentMap, n: Node, c: CommentGroup | null): void { + let list = $.mapGet(cmap, n, null)[0] + if ($.len(list) == 0) { + list = $.arrayToSlice([c]) + } + else { + list = $.append(list, c) + } + $.mapSet(cmap, n, list) +} + +export function CommentMap_Update(cmap: CommentMap, old: Node, _new: Node): Node { + { + let list = $.mapGet(cmap, old, null)[0] + if ($.len(list) > 0) { + $.deleteMapEntry(cmap, old) + $.mapSet(cmap, _new, $.append($.mapGet(cmap, _new, null)[0], list)) + } + } + return _new +} + +export function CommentMap_Filter(cmap: CommentMap, node: Node): CommentMap { + let umap = $.makeMap>() + Inspect(node, (n: Node): boolean => { + { + let g = $.mapGet(cmap, n, null)[0] + if ($.len(g) > 0) { + $.mapSet(umap, n, g) + } + } + return true + }) + return umap +} + +export function CommentMap_Comments(cmap: CommentMap): $.Slice { + let list = $.makeSlice(0, $.len(cmap)) + for (const [_k, e] of cmap?.entries() ?? []) { + { + list = $.append(list, e) + } + } + sortComments(list) + return list +} + +export function CommentMap_String(cmap: CommentMap): string { + let nodes: $.Slice = null + for (const [node, _v] of cmap?.entries() ?? 
[]) { + { + nodes = $.append(nodes, node) + } + } + slices.SortFunc(nodes, (a: Node, b: Node): number => { + let r = cmp.Compare(a!.Pos(), b!.Pos()) + if (r != 0) { + return r + } + return cmp.Compare(a!.End(), b!.End()) + }) + let buf: strings.Builder = new strings.Builder() + fmt.Fprintln(buf, "CommentMap {") + for (let _i = 0; _i < $.len(nodes); _i++) { + const node = nodes![_i] + { + let comment = $.mapGet(cmap, node, null)[0] + // print name of identifiers; print node type for other nodes + let s: string = "" + { + let { value: ident, ok: ok } = $.typeAssert(node, {kind: $.TypeKind.Pointer, elemType: 'Ident'}) + if (ok) { + s = ident!.Name + } + else { + s = fmt.Sprintf("%T", node) + } + } + fmt.Fprintf(buf, "\t%p %20s: %s\n", node, s, summary(comment)) + } + } + fmt.Fprintln(buf, "}") + return buf.String() +} + + +export class commentListReader { + public get fset(): token.FileSet | null { + return this._fields.fset.value + } + public set fset(value: token.FileSet | null) { + this._fields.fset.value = value + } + + public get list(): $.Slice { + return this._fields.list.value + } + public set list(value: $.Slice) { + this._fields.list.value = value + } + + public get index(): number { + return this._fields.index.value + } + public set index(value: number) { + this._fields.index.value = value + } + + // comment group at current index + public get comment(): CommentGroup | null { + return this._fields.comment.value + } + public set comment(value: CommentGroup | null) { + this._fields.comment.value = value + } + + // source interval of comment group at current index + public get pos(): token.Position { + return this._fields.pos.value + } + public set pos(value: token.Position) { + this._fields.pos.value = value + } + + // source interval of comment group at current index + public get end(): token.Position { + return this._fields.end.value + } + public set end(value: token.Position) { + this._fields.end.value = value + } + + public _fields: { + fset: $.VarRef; + list: $.VarRef<$.Slice>; + index: $.VarRef; + comment: $.VarRef; + pos: $.VarRef; + end: $.VarRef; + } + + constructor(init?: Partial<{comment?: CommentGroup | null, end?: token.Position, fset?: token.FileSet | null, index?: number, list?: $.Slice, pos?: token.Position}>) { + this._fields = { + fset: $.varRef(init?.fset ?? null), + list: $.varRef(init?.list ?? null), + index: $.varRef(init?.index ?? 0), + comment: $.varRef(init?.comment ?? null), + pos: $.varRef(init?.pos ? $.markAsStructValue(init.pos.clone()) : new token.Position()), + end: $.varRef(init?.end ? $.markAsStructValue(init.end.clone()) : new token.Position()) + } + } + + public clone(): commentListReader { + const cloned = new commentListReader() + cloned._fields = { + fset: $.varRef(this._fields.fset.value ? $.markAsStructValue(this._fields.fset.value.clone()) : null), + list: $.varRef(this._fields.list.value), + index: $.varRef(this._fields.index.value), + comment: $.varRef(this._fields.comment.value ? 
$.markAsStructValue(this._fields.comment.value.clone()) : null), + pos: $.varRef($.markAsStructValue(this._fields.pos.value.clone())), + end: $.varRef($.markAsStructValue(this._fields.end.value.clone())) + } + return cloned + } + + public eol(): boolean { + const r = this + return r.index >= $.len(r.list) + } + + public next(): void { + const r = this + if (!r.eol()) { + r.comment = r.list![r.index] + r.pos = $.markAsStructValue(await r.fset!.Position(r.comment!.Pos()).clone()) + r.end = $.markAsStructValue(await r.fset!.Position(r.comment!.End()).clone()) + r.index++ + } + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'commentListReader', + new commentListReader(), + [{ name: "eol", args: [], returns: [{ type: { kind: $.TypeKind.Basic, name: "boolean" } }] }, { name: "next", args: [], returns: [] }], + commentListReader, + {"fset": { kind: $.TypeKind.Pointer, elemType: "FileSet" }, "list": { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Pointer, elemType: "CommentGroup" } }, "index": { kind: $.TypeKind.Basic, name: "number" }, "comment": { kind: $.TypeKind.Pointer, elemType: "CommentGroup" }, "pos": "Position", "end": "Position"} + ); +} + +export type nodeStack = $.Slice; + +export function nodeStack_push(s: $.VarRef, n: Node): void { + s.pop(n!.Pos()) + s!.value = $.append((s!.value), n) +} + +export function nodeStack_pop(s: $.VarRef, pos: token.Pos): Node { + let i = $.len(s!.value) + for (; i > 0 && (s!.value)![i - 1]!.End() <= pos; ) { + top = (s!.value)![i - 1] + i-- + } + s!.value = $.goSlice((s!.value), 0, i) + return top +} + + +// sortComments sorts the list of comment groups in source order. +export function sortComments(list: $.Slice): void { + slices.SortFunc(list, (a: CommentGroup | null, b: CommentGroup | null): number => { + return cmp.Compare(a!.Pos(), b!.Pos()) + }) +} + +// nodeList returns the list of nodes of the AST n in source order. +export function nodeList(n: Node): $.Slice { + let list: $.Slice = null + + // don't collect comments + Inspect(n, (n: Node): boolean => { + // don't collect comments + $.typeSwitch(n, [{ types: ['nil', {kind: $.TypeKind.Pointer, elemType: 'CommentGroup'}, {kind: $.TypeKind.Pointer, elemType: 'Comment'}], body: () => { + return false + }}]) + list = $.append(list, n) + return true + }) + + // Note: The current implementation assumes that Inspect traverses the + // AST in depth-first and thus _source_ order. If AST traversal + // does not follow source order, the sorting call below will be + // required. + // slices.Sort(list, func(a, b Node) int { + // r := cmp.Compare(a.Pos(), b.Pos()) + // if r != 0 { + // return r + // } + // return cmp.Compare(a.End(), b.End()) + // }) + + return list +} + +// NewCommentMap creates a new comment map by associating comment groups +// of the comments list with the nodes of the AST specified by node. +// +// A comment group g is associated with a node n if: +// +// - g starts on the same line as n ends +// - g starts on the line immediately following n, and there is +// at least one empty line after g and before the next node +// - g starts before n and is not associated to the node before n +// via the previous rules +// +// NewCommentMap tries to associate a comment group to the "largest" +// node possible: For instance, if the comment is a line comment +// trailing an assignment, the comment is associated with the entire +// assignment rather than just the last operand in the assignment. 
+export function NewCommentMap(fset: token.FileSet | null, node: Node, comments: $.Slice): CommentMap { + + // no comments to map + if ($.len(comments) == 0) { + return null + } + + let cmap = $.makeMap>() + + // set up comment reader r + let tmp = $.makeSlice($.len(comments)) + $.copy(tmp, comments) // don't change incoming comments + sortComments(tmp) + let r = $.markAsStructValue(new commentListReader({fset: fset, list: tmp})) // !r.eol() because len(comments) > 0 + r.next() + + // create node list in lexical order + let nodes = nodeList(node) + nodes = $.append(nodes, null) // append sentinel + + // set up iteration variables + + // previous node + // end of p + // previous node group (enclosing nodes of "importance") + // end of pg + // stack of node groups + // previous node + let p: Node = null + // end of p + let pend: token.Position = new token.Position() + // previous node group (enclosing nodes of "importance") + let pg: Node = null + // end of pg + let pgend: token.Position = new token.Position() + // stack of node groups + let stack: nodeStack = null + + // current node position + + // set fake sentinel position to infinity so that + // all comments get processed before the sentinel + + // process comments before current node + + // determine recent node group + + // Try to associate a comment first with a node group + // (i.e., a node of "importance" such as a declaration); + // if that fails, try to associate it with the most recent + // node. + // TODO(gri) try to simplify the logic below + + // 1) comment starts on same line as previous node group ends, or + // 2) comment starts on the line immediately after the + // previous node group and there is an empty line before + // the current node + // => associate comment with previous node group + + // same rules apply as above for p rather than pg, + // but also associate with p if we are at the end (q == nil) + + // otherwise, associate comment with current node + + // we can only reach here if there was no p + // which would imply that there were no nodes + + // update previous node + + // update previous node group if we see an "important" node + for (let _i = 0; _i < $.len(nodes); _i++) { + const q = nodes![_i] + { + let qpos: token.Position = new token.Position() + + // current node position + + // set fake sentinel position to infinity so that + // all comments get processed before the sentinel + if (q != null) { + qpos = $.markAsStructValue(await fset!.Position(q!.Pos()).clone()) // current node position + } + else { + // set fake sentinel position to infinity so that + // all comments get processed before the sentinel + let infinity: number = (1 << 30) + qpos.Offset = 1073741824 + qpos.Line = 1073741824 + } + + // process comments before current node + + // determine recent node group + + // Try to associate a comment first with a node group + // (i.e., a node of "importance" such as a declaration); + // if that fails, try to associate it with the most recent + // node. 
+ // TODO(gri) try to simplify the logic below + + // 1) comment starts on same line as previous node group ends, or + // 2) comment starts on the line immediately after the + // previous node group and there is an empty line before + // the current node + // => associate comment with previous node group + + // same rules apply as above for p rather than pg, + // but also associate with p if we are at the end (q == nil) + + // otherwise, associate comment with current node + + // we can only reach here if there was no p + // which would imply that there were no nodes + for (; r.end.Offset <= qpos.Offset; ) { + // determine recent node group + { + let top = nodeStack_pop(stack, r.comment!.Pos()) + if (top != null) { + pg = top + pgend = $.markAsStructValue(await fset!.Position(pg!.End()).clone()) + } + } + // Try to associate a comment first with a node group + // (i.e., a node of "importance" such as a declaration); + // if that fails, try to associate it with the most recent + // node. + // TODO(gri) try to simplify the logic below + let assoc: Node = null + + // 1) comment starts on same line as previous node group ends, or + // 2) comment starts on the line immediately after the + // previous node group and there is an empty line before + // the current node + // => associate comment with previous node group + + // same rules apply as above for p rather than pg, + // but also associate with p if we are at the end (q == nil) + + // otherwise, associate comment with current node + + // we can only reach here if there was no p + // which would imply that there were no nodes + switch (true) { + case pg != null && (pgend.Line == r.pos.Line || pgend.Line + 1 == r.pos.Line && r.end.Line + 1 < qpos.Line): + assoc = pg + break + case p != null && (pend.Line == r.pos.Line || pend.Line + 1 == r.pos.Line && r.end.Line + 1 < qpos.Line || q == null): + assoc = p + break + default: + if (q == null) { + // we can only reach here if there was no p + // which would imply that there were no nodes + $.panic("internal error: no comments should be associated with sentinel") + } + assoc = q + break + } + CommentMap_addComment(cmap, assoc, r.comment) + if (r.eol()) { + return cmap + } + r.next() + } + + // update previous node + p = q + pend = $.markAsStructValue(await fset!.Position(p!.End()).clone()) + + // update previous node group if we see an "important" node + $.typeSwitch(q, [{ types: [{kind: $.TypeKind.Pointer, elemType: 'File'}, {kind: $.TypeKind.Pointer, elemType: 'Field'}, 'Decl', 'Spec', 'Stmt'], body: () => { + nodeStack_push(stack, q) + }}]) + } + } + + return cmap +} + +export function summary(list: $.Slice): string { + let maxLen: number = 40 + let buf: bytes.Buffer = new bytes.Buffer() + + // collect comments text + + // Note: CommentGroup.Text() does too much work for what we + // need and would only replace this innermost loop. + // Just do it explicitly. + loop: for (let _i = 0; _i < $.len(list); _i++) { + const group = list![_i] + { + // Note: CommentGroup.Text() does too much work for what we + // need and would only replace this innermost loop. + // Just do it explicitly. 
+ for (let _i = 0; _i < $.len(group!.List); _i++) { + const comment = group!.List![_i] + { + if (buf.Len() >= 40) { + break + } + buf.WriteString(comment!.Text) + } + } + } + } + + // truncate if too long + if (buf.Len() > 40) { + buf.Truncate(40 - 3) + buf.WriteString("...") + } + + // replace any invisibles with blanks + let bytes = buf.Bytes() + for (let i = 0; i < $.len(bytes); i++) { + const b = bytes![i] + { + switch (b) { + case 9: + case 10: + case 13: + bytes![i] = 32 + break + } + } + } + + return $.bytesToString(bytes) +} + diff --git a/compliance/deps/go/ast/filter.gs.ts b/compliance/deps/go/ast/filter.gs.ts new file mode 100644 index 00000000..5b2fe637 --- /dev/null +++ b/compliance/deps/go/ast/filter.gs.ts @@ -0,0 +1,767 @@ +import * as $ from "@goscript/builtin/index.js" +import { IsExported, NewIdent } from "./ast.gs.js"; +import { ArrayType, ChanType, Comment, CommentGroup, CompositeLit, Decl, Expr, FieldList, File, FuncDecl, FuncType, GenDecl, Ident, ImportSpec, InterfaceType, KeyValueExpr, MapType, Package, ParenExpr, SelectorExpr, Spec, StarExpr, StructType, TypeSpec, ValueSpec } from "./ast.gs.js"; + +import * as token from "@goscript/go/token/index.js" + +import * as slices from "@goscript/slices/index.js" + +// If set, duplicate function declarations are excluded. +export let FilterFuncDuplicates: MergeMode = (1 << 0) + +// If set, comments that are not associated with a specific +// AST node (as Doc or Comment) are excluded. +export let FilterUnassociatedComments: MergeMode = 0 + +// If set, duplicate import declarations are excluded. +export let FilterImportDuplicates: MergeMode = 0 + +export type Filter = ((p0: string) => boolean) | null; + +export type MergeMode = number; + +let separator: Comment | null = new Comment({}) + +// exportFilter is a special filter function to extract exported nodes. +export function exportFilter(name: string): boolean { + return IsExported(name) +} + +// FileExports trims the AST for a Go source file in place such that +// only exported nodes remain: all top-level identifiers which are not exported +// and their associated information (such as type, initial value, or function +// body) are removed. Non-exported fields and methods of exported types are +// stripped. The [File.Comments] list is not changed. +// +// FileExports reports whether there are exported declarations. +export function FileExports(src: File | null): boolean { + return filterFile(src, exportFilter, true) +} + +// PackageExports trims the AST for a Go package in place such that +// only exported nodes remain. The pkg.Files list is not changed, so that +// file names and top-level package comments don't get lost. +// +// PackageExports reports whether there are exported declarations; +// it returns false otherwise. +export function PackageExports(pkg: Package | null): boolean { + return filterPackage(pkg, exportFilter, true) +} + +export function filterIdentList(list: $.Slice, f: Filter | null): $.Slice { + let j = 0 + for (let _i = 0; _i < $.len(list); _i++) { + const x = list![_i] + { + if (f!(x!.Name)) { + list![j] = x + j++ + } + } + } + return $.goSlice(list, 0, j) +} + +// fieldName assumes that x is the type of an anonymous field and +// returns the corresponding field name. If x is not an acceptable +// anonymous field, the result is nil. 
+export function fieldName(x: Expr): Ident | null { + $.typeSwitch(x, [{ types: [{kind: $.TypeKind.Pointer, elemType: 'Ident'}], body: (t) => { + return t + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'SelectorExpr'}], body: (t) => { + { + let { ok: ok } = $.typeAssert(t!.X, {kind: $.TypeKind.Pointer, elemType: 'Ident'}) + if (ok) { + return t!.Sel + } + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'StarExpr'}], body: (t) => { + return fieldName(t!.X) + }}]) + return null +} + +export function filterFieldList(fields: FieldList | null, filter: Filter | null, _export: boolean): boolean { + let removedFields: boolean = false + { + if (fields == null) { + return false + } + let list = fields!.List + let j = 0 + + // anonymous field + for (let _i = 0; _i < $.len(list); _i++) { + const f = list![_i] + { + let keepField = false + + // anonymous field + if ($.len(f!.Names) == 0) { + // anonymous field + let name = fieldName(f!.Type) + keepField = name != null && filter!(name!.Name) + } + else { + let n = $.len(f!.Names) + f!.Names = filterIdentList(f!.Names, filter) + if ($.len(f!.Names) < n) { + removedFields = true + } + keepField = $.len(f!.Names) > 0 + } + if (keepField) { + if (_export) { + filterType(f!.Type, filter, _export) + } + list![j] = f + j++ + } + } + } + if (j < $.len(list)) { + removedFields = true + } + fields!.List = $.goSlice(list, 0, j) + return removedFields + } +} + +export function filterCompositeLit(lit: CompositeLit | null, filter: Filter | null, _export: boolean): void { + let n = $.len(lit!.Elts) + lit!.Elts = filterExprList(lit!.Elts, filter, _export) + if ($.len(lit!.Elts) < n) { + lit!.Incomplete = true + } +} + +export function filterExprList(list: $.Slice, filter: Filter | null, _export: boolean): $.Slice { + let j = 0 + for (let _i = 0; _i < $.len(list); _i++) { + const exp = list![_i] + { + $.typeSwitch(exp, [{ types: [{kind: $.TypeKind.Pointer, elemType: 'CompositeLit'}], body: (x) => { + filterCompositeLit(x, filter, _export) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'KeyValueExpr'}], body: (x) => { + const _temp_x = x + { + let { value: x, ok: ok } = $.typeAssert(_temp_x.Key, {kind: $.TypeKind.Pointer, elemType: 'Ident'}) + if (ok && !filter!(x!.Name)) { + continue + } + } + const _temp_x = x + { + let { value: x, ok: ok } = $.typeAssert(_temp_x.Value, {kind: $.TypeKind.Pointer, elemType: 'CompositeLit'}) + if (ok) { + filterCompositeLit(x, filter, _export) + } + } + }}]) + list![j] = exp + j++ + } + } + return $.goSlice(list, 0, j) +} + +export function filterParamList(fields: FieldList | null, filter: Filter | null, _export: boolean): boolean { + if (fields == null) { + return false + } + let b: boolean = false + for (let _i = 0; _i < $.len(fields!.List); _i++) { + const f = fields!.List![_i] + { + if (filterType(f!.Type, filter, _export)) { + b = true + } + } + } + return b +} + +export function filterType(typ: Expr, f: Filter | null, _export: boolean): boolean { + $.typeSwitch(typ, [{ types: [{kind: $.TypeKind.Pointer, elemType: 'Ident'}], body: (t) => { + return f!(t!.Name) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ParenExpr'}], body: (t) => { + return filterType(t!.X, f, _export) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ArrayType'}], body: (t) => { + return filterType(t!.Elt, f, _export) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'StructType'}], body: (t) => { + if (filterFieldList(t!.Fields, f, _export)) { + t!.Incomplete = true + } + return $.len(t!.Fields!.List) > 0 + }}, + { 
types: [{kind: $.TypeKind.Pointer, elemType: 'FuncType'}], body: (t) => { + let b1 = filterParamList(t!.Params, f, _export) + let b2 = filterParamList(t!.Results, f, _export) + return b1 || b2 + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'InterfaceType'}], body: (t) => { + if (filterFieldList(t!.Methods, f, _export)) { + t!.Incomplete = true + } + return $.len(t!.Methods!.List) > 0 + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'MapType'}], body: (t) => { + let b1 = filterType(t!.Key, f, _export) + let b2 = filterType(t!.Value, f, _export) + return b1 || b2 + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ChanType'}], body: (t) => { + return filterType(t!.Value, f, _export) + }}]) + return false +} + +export function filterSpec(spec: Spec, f: Filter | null, _export: boolean): boolean { + + // For general filtering (not just exports), + // filter type even if name is not filtered + // out. + // If the type contains filtered elements, + // keep the declaration. + $.typeSwitch(spec, [{ types: [{kind: $.TypeKind.Pointer, elemType: 'ValueSpec'}], body: (s) => { + s!.Names = filterIdentList(s!.Names, f) + s!.Values = filterExprList(s!.Values, f, _export) + if ($.len(s!.Names) > 0) { + if (_export) { + filterType(s!.Type, f, _export) + } + return true + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'TypeSpec'}], body: (s) => { + if (f!(s!.Name!.Name)) { + if (_export) { + filterType(s!.Type, f, _export) + } + return true + } + if (!_export) { + // For general filtering (not just exports), + // filter type even if name is not filtered + // out. + // If the type contains filtered elements, + // keep the declaration. + return filterType(s!.Type, f, _export) + } + }}]) + return false +} + +export function filterSpecList(list: $.Slice, f: Filter | null, _export: boolean): $.Slice { + let j = 0 + for (let _i = 0; _i < $.len(list); _i++) { + const s = list![_i] + { + if (filterSpec(s, f, _export)) { + list![j] = s + j++ + } + } + } + return $.goSlice(list, 0, j) +} + +// FilterDecl trims the AST for a Go declaration in place by removing +// all names (including struct field and interface method names, but +// not from parameter lists) that don't pass through the filter f. +// +// FilterDecl reports whether there are any declared names left after +// filtering. +export function FilterDecl(decl: Decl, f: Filter | null): boolean { + return filterDecl(decl, f, false) +} + +export function filterDecl(decl: Decl, f: Filter | null, _export: boolean): boolean { + $.typeSwitch(decl, [{ types: [{kind: $.TypeKind.Pointer, elemType: 'GenDecl'}], body: (d) => { + d!.Specs = filterSpecList(d!.Specs, f, _export) + return $.len(d!.Specs) > 0 + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'FuncDecl'}], body: (d) => { + return f!(d!.Name!.Name) + }}]) + return false +} + +// FilterFile trims the AST for a Go file in place by removing all +// names from top-level declarations (including struct field and +// interface method names, but not from parameter lists) that don't +// pass through the filter f. If the declaration is empty afterwards, +// the declaration is removed from the AST. Import declarations are +// always removed. The [File.Comments] list is not changed. +// +// FilterFile reports whether there are any top-level declarations +// left after filtering. 
+export function FilterFile(src: File | null, f: Filter | null): boolean { + return filterFile(src, f, false) +} + +export function filterFile(src: File | null, f: Filter | null, _export: boolean): boolean { + let j = 0 + for (let _i = 0; _i < $.len(src!.Decls); _i++) { + const d = src!.Decls![_i] + { + if (filterDecl(d, f, _export)) { + src!.Decls![j] = d + j++ + } + } + } + src!.Decls = $.goSlice(src!.Decls, 0, j) + return j > 0 +} + +// FilterPackage trims the AST for a Go package in place by removing +// all names from top-level declarations (including struct field and +// interface method names, but not from parameter lists) that don't +// pass through the filter f. If the declaration is empty afterwards, +// the declaration is removed from the AST. The pkg.Files list is not +// changed, so that file names and top-level package comments don't get +// lost. +// +// FilterPackage reports whether there are any top-level declarations +// left after filtering. +export function FilterPackage(pkg: Package | null, f: Filter | null): boolean { + return filterPackage(pkg, f, false) +} + +export function filterPackage(pkg: Package | null, f: Filter | null, _export: boolean): boolean { + let hasDecls = false + for (const [_k, src] of pkg!.Files?.entries() ?? []) { + { + if (filterFile(src, f, _export)) { + hasDecls = true + } + } + } + return hasDecls +} + +// nameOf returns the function (foo) or method name (foo.bar) for +// the given function declaration. If the AST is incorrect for the +// receiver, it assumes a function instead. +export function nameOf(f: FuncDecl | null): string { + + // looks like a correct receiver declaration + + // dereference pointer receiver types + + // the receiver type must be a type name + + // otherwise assume a function instead + { + let r = f!.Recv + if (r != null && $.len(r!.List) == 1) { + // looks like a correct receiver declaration + let t = r!.List![0]!.Type + // dereference pointer receiver types + { + let { value: p } = $.typeAssert(t, {kind: $.TypeKind.Pointer, elemType: 'StarExpr'}) + if (p != null) { + t = p!.X + } + } + // the receiver type must be a type name + + // otherwise assume a function instead + { + let { value: p } = $.typeAssert(t, {kind: $.TypeKind.Pointer, elemType: 'Ident'}) + if (p != null) { + return p!.Name + "." + f!.Name!.Name + } + } + // otherwise assume a function instead + } + } + return f!.Name!.Name +} + +// MergePackageFiles creates a file AST by merging the ASTs of the +// files belonging to a package. The mode flags control merging behavior. +export function MergePackageFiles(pkg: Package | null, mode: MergeMode): File | null { + // Count the number of package docs, comments and declarations across + // all package files. Also, compute sorted list of filenames, so that + // subsequent iterations can always iterate in the same order. + let ndocs = 0 + let ncomments = 0 + let ndecls = 0 + let filenames = $.makeSlice($.len(pkg!.Files), undefined, 'string') + let minPos: token.Pos = 0 + let maxPos: token.Pos = 0 + let i = 0 + + // +1 for separator + for (const [filename, f] of pkg!.Files?.entries() ?? 
[]) { + { + filenames![i] = filename + i++ + + // +1 for separator + if (f!.Doc != null) { + ndocs += $.len(f!.Doc!.List) + 1 // +1 for separator + } + ncomments += $.len(f!.Comments) + ndecls += $.len(f!.Decls) + if (i == 0 || f!.FileStart < minPos) { + minPos = f!.FileStart + } + if (i == 0 || f!.FileEnd > maxPos) { + maxPos = f!.FileEnd + } + } + } + slices.Sort(filenames) + + // Collect package comments from all package files into a single + // CommentGroup - the collected package documentation. In general + // there should be only one file with a package comment; but it's + // better to collect extra comments than drop them on the floor. + let doc: CommentGroup | null = null + let pos: token.Pos = 0 + + // -1: no separator before first group + + // not the first group - add separator + + // Keep the maximum package clause position as + // position for the package clause of the merged + // files. + if (ndocs > 0) { + let list = $.makeSlice(ndocs - 1) // -1: no separator before first group + let i = 0 + + // not the first group - add separator + + // Keep the maximum package clause position as + // position for the package clause of the merged + // files. + for (let _i = 0; _i < $.len(filenames); _i++) { + const filename = filenames![_i] + { + let f = $.mapGet(pkg!.Files, filename, null)[0] + + // not the first group - add separator + + // Keep the maximum package clause position as + // position for the package clause of the merged + // files. + if (f!.Doc != null) { + + // not the first group - add separator + if (i > 0) { + // not the first group - add separator + list![i] = separator + i++ + } + for (let _i = 0; _i < $.len(f!.Doc!.List); _i++) { + const c = f!.Doc!.List![_i] + { + list![i] = c + i++ + } + } + + // Keep the maximum package clause position as + // position for the package clause of the merged + // files. + if (f!.Package > pos) { + // Keep the maximum package clause position as + // position for the package clause of the merged + // files. + pos = f!.Package + } + } + } + } + doc = new CommentGroup({}) + } + + // Collect declarations from all package files. + let decls: $.Slice = null + + // map of func name -> decls index + // current index + // number of filtered entries + + // A language entity may be declared multiple + // times in different package files; only at + // build time declarations must be unique. + // For now, exclude multiple declarations of + // functions - keep the one with documentation. + // + // TODO(gri): Expand this filtering to other + // entities (const, type, vars) if + // multiple declarations are common. + + // function declared already + + // existing declaration has no documentation; + // ignore the existing declaration + + // ignore the new declaration + + // filtered an entry + + // Eliminate nil entries from the decls list if entries were + // filtered. We do this using a 2nd pass in order to not disturb + // the original declaration order in the source (otherwise, this + // would also invalidate the monotonically increasing position + // info within a single file). + if (ndecls > 0) { + decls = $.makeSlice(ndecls) + let funcs = $.makeMap() // map of func name -> decls index + let i = 0 // current index + let n = 0 // number of filtered entries + + // A language entity may be declared multiple + // times in different package files; only at + // build time declarations must be unique. + // For now, exclude multiple declarations of + // functions - keep the one with documentation. 
+ // + // TODO(gri): Expand this filtering to other + // entities (const, type, vars) if + // multiple declarations are common. + + // function declared already + + // existing declaration has no documentation; + // ignore the existing declaration + + // ignore the new declaration + + // filtered an entry + for (let _i = 0; _i < $.len(filenames); _i++) { + const filename = filenames![_i] + { + let f = $.mapGet(pkg!.Files, filename, null)[0] + + // A language entity may be declared multiple + // times in different package files; only at + // build time declarations must be unique. + // For now, exclude multiple declarations of + // functions - keep the one with documentation. + // + // TODO(gri): Expand this filtering to other + // entities (const, type, vars) if + // multiple declarations are common. + + // function declared already + + // existing declaration has no documentation; + // ignore the existing declaration + + // ignore the new declaration + + // filtered an entry + for (let _i = 0; _i < $.len(f!.Decls); _i++) { + const d = f!.Decls![_i] + { + + // A language entity may be declared multiple + // times in different package files; only at + // build time declarations must be unique. + // For now, exclude multiple declarations of + // functions - keep the one with documentation. + // + // TODO(gri): Expand this filtering to other + // entities (const, type, vars) if + // multiple declarations are common. + + // function declared already + + // existing declaration has no documentation; + // ignore the existing declaration + + // ignore the new declaration + + // filtered an entry + if ((mode & 1) != 0) { + // A language entity may be declared multiple + // times in different package files; only at + // build time declarations must be unique. + // For now, exclude multiple declarations of + // functions - keep the one with documentation. + // + // TODO(gri): Expand this filtering to other + // entities (const, type, vars) if + // multiple declarations are common. + + // function declared already + + // existing declaration has no documentation; + // ignore the existing declaration + + // ignore the new declaration + + // filtered an entry + { + let { value: f, ok: isFun } = $.typeAssert(d, {kind: $.TypeKind.Pointer, elemType: 'FuncDecl'}) + if (isFun) { + let name = nameOf(f) + + // function declared already + + // existing declaration has no documentation; + // ignore the existing declaration + + // ignore the new declaration + + // filtered an entry + { + let [j, exists] = $.mapGet(funcs, name, 0) + if (exists) { + // function declared already + + // existing declaration has no documentation; + // ignore the existing declaration + + // ignore the new declaration + if (decls![j] != null && $.mustTypeAssert(decls![j], {kind: $.TypeKind.Pointer, elemType: 'FuncDecl'})!.Doc == null) { + // existing declaration has no documentation; + // ignore the existing declaration + decls![j] = null + } + else { + // ignore the new declaration + d = null + } + n++ + } + else { + $.mapSet(funcs, name, i) + } + } + } + } + } + decls![i] = d + i++ + } + } + } + } + + // Eliminate nil entries from the decls list if entries were + // filtered. We do this using a 2nd pass in order to not disturb + // the original declaration order in the source (otherwise, this + // would also invalidate the monotonically increasing position + // info within a single file). 
+ if (n > 0) { + i = 0 + for (let _i = 0; _i < $.len(decls); _i++) { + const d = decls![_i] + { + if (d != null) { + decls![i] = d + i++ + } + } + } + decls = $.goSlice(decls, 0, i) + } + } + + // Collect import specs from all package files. + let imports: $.Slice = null + + // TODO: consider handling cases where: + // - 2 imports exist with the same import path but + // have different local names (one should probably + // keep both of them) + // - 2 imports exist but only one has a comment + // - 2 imports exist and they both have (possibly + // different) comments + + // Iterate over filenames for deterministic order. + if ((mode & 4) != 0) { + let seen = $.makeMap() + + // TODO: consider handling cases where: + // - 2 imports exist with the same import path but + // have different local names (one should probably + // keep both of them) + // - 2 imports exist but only one has a comment + // - 2 imports exist and they both have (possibly + // different) comments + for (let _i = 0; _i < $.len(filenames); _i++) { + const filename = filenames![_i] + { + let f = $.mapGet(pkg!.Files, filename, null)[0] + + // TODO: consider handling cases where: + // - 2 imports exist with the same import path but + // have different local names (one should probably + // keep both of them) + // - 2 imports exist but only one has a comment + // - 2 imports exist and they both have (possibly + // different) comments + for (let _i = 0; _i < $.len(f!.Imports); _i++) { + const imp = f!.Imports![_i] + { + + // TODO: consider handling cases where: + // - 2 imports exist with the same import path but + // have different local names (one should probably + // keep both of them) + // - 2 imports exist but only one has a comment + // - 2 imports exist and they both have (possibly + // different) comments + { + let path = imp!.Path!.Value + if (!$.mapGet(seen, path, false)[0]) { + // TODO: consider handling cases where: + // - 2 imports exist with the same import path but + // have different local names (one should probably + // keep both of them) + // - 2 imports exist but only one has a comment + // - 2 imports exist and they both have (possibly + // different) comments + imports = $.append(imports, imp) + $.mapSet(seen, path, true) + } + } + } + } + } + } + } + else { + // Iterate over filenames for deterministic order. + for (let _i = 0; _i < $.len(filenames); _i++) { + const filename = filenames![_i] + { + let f = $.mapGet(pkg!.Files, filename, null)[0] + imports = $.append(imports, f!.Imports) + } + } + } + + // Collect comments from all package files. + let comments: $.Slice = null + if ((mode & 2) == 0) { + comments = $.makeSlice(ncomments) + let i = 0 + for (let _i = 0; _i < $.len(filenames); _i++) { + const filename = filenames![_i] + { + let f = $.mapGet(pkg!.Files, filename, null)[0] + i += $.copy($.goSlice(comments, i, undefined), f!.Comments) + } + } + } + + // TODO(gri) need to compute unresolved identifiers! 
+ return new File({}) +} + diff --git a/compliance/deps/go/ast/import.gs.ts b/compliance/deps/go/ast/import.gs.ts new file mode 100644 index 00000000..a97e6a46 --- /dev/null +++ b/compliance/deps/go/ast/import.gs.ts @@ -0,0 +1,418 @@ +import * as $ from "@goscript/builtin/index.js" +import { CommentGroup, File, GenDecl, ImportSpec, Spec } from "./ast.gs.js"; + +import * as cmp from "@goscript/cmp/index.js" + +import * as token from "@goscript/go/token/index.js" + +import * as slices from "@goscript/slices/index.js" + +import * as strconv from "@goscript/strconv/index.js" + +export class cgPos { + // true if comment is to the left of the spec, false otherwise. + public get left(): boolean { + return this._fields.left.value + } + public set left(value: boolean) { + this._fields.left.value = value + } + + public get cg(): CommentGroup | null { + return this._fields.cg.value + } + public set cg(value: CommentGroup | null) { + this._fields.cg.value = value + } + + public _fields: { + left: $.VarRef; + cg: $.VarRef; + } + + constructor(init?: Partial<{cg?: CommentGroup | null, left?: boolean}>) { + this._fields = { + left: $.varRef(init?.left ?? false), + cg: $.varRef(init?.cg ?? null) + } + } + + public clone(): cgPos { + const cloned = new cgPos() + cloned._fields = { + left: $.varRef(this._fields.left.value), + cg: $.varRef(this._fields.cg.value ? $.markAsStructValue(this._fields.cg.value.clone()) : null) + } + return cloned + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'cgPos', + new cgPos(), + [], + cgPos, + {"left": { kind: $.TypeKind.Basic, name: "boolean" }, "cg": { kind: $.TypeKind.Pointer, elemType: "CommentGroup" }} + ); +} + +export class posSpan { + public get Start(): token.Pos { + return this._fields.Start.value + } + public set Start(value: token.Pos) { + this._fields.Start.value = value + } + + public get End(): token.Pos { + return this._fields.End.value + } + public set End(value: token.Pos) { + this._fields.End.value = value + } + + public _fields: { + Start: $.VarRef; + End: $.VarRef; + } + + constructor(init?: Partial<{End?: token.Pos, Start?: token.Pos}>) { + this._fields = { + Start: $.varRef(init?.Start ?? 0 as token.Pos), + End: $.varRef(init?.End ?? 0 as token.Pos) + } + } + + public clone(): posSpan { + const cloned = new posSpan() + cloned._fields = { + Start: $.varRef(this._fields.Start.value), + End: $.varRef(this._fields.End.value) + } + return cloned + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'posSpan', + new posSpan(), + [], + posSpan, + {"Start": "Pos", "End": "Pos"} + ); +} + +// SortImports sorts runs of consecutive import lines in import blocks in f. +// It also removes duplicate imports when it is possible to do so without data loss. +export function SortImports(fset: token.FileSet | null, f: File | null): void { + + // Not an import declaration, so we're done. + // Imports are always first. + + // Not a block: sorted by default. + + // Identify and sort runs of specs on successive lines. + + // j begins a new run. End this one. + + // Deduping can leave a blank line before the rparen; clean that up. + for (let _i = 0; _i < $.len(f!.Decls); _i++) { + const d = f!.Decls![_i] + { + let { value: d, ok: ok } = $.typeAssert(d, {kind: $.TypeKind.Pointer, elemType: 'GenDecl'}) + + // Not an import declaration, so we're done. + // Imports are always first. + if (!ok || d!.Tok != token.IMPORT) { + // Not an import declaration, so we're done. 
+ // Imports are always first. + break + } + + // Not a block: sorted by default. + if (!token.Pos_IsValid(d!.Lparen)) { + // Not a block: sorted by default. + continue + } + + // Identify and sort runs of specs on successive lines. + let i = 0 + let specs = $.goSlice(d!.Specs, undefined, 0) + + // j begins a new run. End this one. + for (let j = 0; j < $.len(d!.Specs); j++) { + const s = d!.Specs![j] + { + + // j begins a new run. End this one. + if (j > i && lineAt(fset, s!.Pos()) > 1 + lineAt(fset, d!.Specs![j - 1]!.End())) { + // j begins a new run. End this one. + specs = $.append(specs, sortSpecs(fset, f, $.goSlice(d!.Specs, i, j))) + i = j + } + } + } + specs = $.append(specs, sortSpecs(fset, f, $.goSlice(d!.Specs, i, undefined))) + d!.Specs = specs + + // Deduping can leave a blank line before the rparen; clean that up. + if ($.len(d!.Specs) > 0) { + let lastSpec = d!.Specs![$.len(d!.Specs) - 1] + let lastLine = lineAt(fset, lastSpec!.Pos()) + let rParenLine = lineAt(fset, d!.Rparen) + for (; rParenLine > lastLine + 1; ) { + rParenLine-- + (await fset!.File(d!.Rparen))!.MergeLine(rParenLine) + } + } + } + } + + // Make File.Imports order consistent. + f!.Imports = $.goSlice(f!.Imports, undefined, 0) + for (let _i = 0; _i < $.len(f!.Decls); _i++) { + const decl = f!.Decls![_i] + { + const _temp_decl = decl + { + let { value: decl, ok: ok } = $.typeAssert(_temp_decl, {kind: $.TypeKind.Pointer, elemType: 'GenDecl'}) + if (ok && decl!.Tok == token.IMPORT) { + for (let _i = 0; _i < $.len(decl!.Specs); _i++) { + const spec = decl!.Specs![_i] + { + f!.Imports = $.append(f!.Imports, $.mustTypeAssert(spec, {kind: $.TypeKind.Pointer, elemType: 'ImportSpec'})) + } + } + } + } + } + } +} + +export function lineAt(fset: token.FileSet | null, pos: token.Pos): number { + return (await fset!.PositionFor(pos, false))!.Line +} + +export function importPath(s: Spec): string { + let [t, err] = strconv.Unquote($.mustTypeAssert(s, {kind: $.TypeKind.Pointer, elemType: 'ImportSpec'})!.Path!.Value) + if (err == null) { + return t + } + return "" +} + +export function importName(s: Spec): string { + let n = $.mustTypeAssert(s, {kind: $.TypeKind.Pointer, elemType: 'ImportSpec'})!.Name + if (n == null) { + return "" + } + return n!.Name +} + +export function importComment(s: Spec): string { + let c = $.mustTypeAssert(s, {kind: $.TypeKind.Pointer, elemType: 'ImportSpec'})!.Comment + if (c == null) { + return "" + } + return c!.Text() +} + +// collapse indicates whether prev may be removed, leaving only next. +export function collapse(prev: Spec, next: Spec): boolean { + if (importPath(next) != importPath(prev) || importName(next) != importName(prev)) { + return false + } + return $.mustTypeAssert(prev, {kind: $.TypeKind.Pointer, elemType: 'ImportSpec'})!.Comment == null +} + +export function sortSpecs(fset: token.FileSet | null, f: File | null, specs: $.Slice): $.Slice { + // Can't short-circuit here even if specs are already sorted, + // since they might yet need deduplication. + // A lone import, however, may be safely ignored. + if ($.len(specs) <= 1) { + return specs + } + + // Record positions for specs. + let pos = $.makeSlice($.len(specs)) + for (let i = 0; i < $.len(specs); i++) { + const s = specs![i] + { + pos![i] = $.markAsStructValue(new posSpan({})) + } + } + + // Identify comments in this range. 
+ let begSpecs = pos![0].Start + let endSpecs = pos![$.len(pos) - 1].End + let beg = (await fset!.File(begSpecs))!.LineStart(lineAt(fset, begSpecs)) + let endLine = lineAt(fset, endSpecs) + let endFile = await fset!.File(endSpecs) + let end: token.Pos = 0 + + // beginning of next line + if (endLine == await endFile!.LineCount()) { + end = endSpecs + } + else { + end = await endFile!.LineStart(endLine + 1) // beginning of next line + } + let first = $.len(f!.Comments) + let last = -1 + + // g.End() < end + + // comment is within the range [beg, end[ of import declarations + for (let i = 0; i < $.len(f!.Comments); i++) { + const g = f!.Comments![i] + { + if (g!.End() >= end) { + break + } + // g.End() < end + + // comment is within the range [beg, end[ of import declarations + if (beg <= g!.Pos()) { + // comment is within the range [beg, end[ of import declarations + if (i < first) { + first = i + } + if (i > last) { + last = i + } + } + } + } + + let comments: $.Slice = null + if (last >= 0) { + comments = $.goSlice(f!.Comments, first, last + 1) + } + + // Assign each comment to the import spec on the same line. + let importComments = new Map([]) + let specIndex = 0 + + // A block comment can appear before the first import spec. + + // Or it can appear on the left of an import spec. + for (let _i = 0; _i < $.len(comments); _i++) { + const g = comments![_i] + { + for (; specIndex + 1 < $.len(specs) && pos![specIndex + 1].Start <= g!.Pos(); ) { + specIndex++ + } + let left: boolean = false + // A block comment can appear before the first import spec. + + // Or it can appear on the left of an import spec. + if (specIndex == 0 && pos![specIndex].Start > g!.Pos()) { + left = true + } + else if (specIndex + 1 < $.len(specs) && lineAt(fset, pos![specIndex].Start) + 1 == lineAt(fset, g!.Pos())) { + specIndex++ + left = true + } + let s = $.mustTypeAssert(specs![specIndex], {kind: $.TypeKind.Pointer, elemType: 'ImportSpec'}) + $.mapSet(importComments, s, $.append($.mapGet(importComments, s, null)[0], $.markAsStructValue(new cgPos({cg: g, left: left})))) + } + } + + // Sort the import specs by import path. + // Remove duplicates, when possible without data loss. + // Reassign the import paths to have the same position sequence. + // Reassign each comment to the spec on the same line. + // Sort the comments by new position. + slices.SortFunc(specs, (a: Spec, b: Spec): number => { + let ipath = importPath(a) + let jpath = importPath(b) + let r = cmp.Compare(ipath, jpath) + if (r != 0) { + return r + } + let iname = importName(a) + let jname = importName(b) + r = cmp.Compare(iname, jname) + if (r != 0) { + return r + } + return cmp.Compare(importComment(a), importComment(b)) + }) + + // Dedup. Thanks to our sorting, we can just consider + // adjacent pairs of imports. + let deduped = $.goSlice(specs, undefined, 0) + for (let i = 0; i < $.len(specs); i++) { + const s = specs![i] + { + if (i == $.len(specs) - 1 || !collapse(s, specs![i + 1])) { + deduped = $.append(deduped, s) + } + else { + let p = s!.Pos() + (await fset!.File(p))!.MergeLine(lineAt(fset, p)) + } + } + } + specs = deduped + + // Fix up comment positions + + // An import spec can have both block comment and a line comment + // to its right. In that case, both of them will have the same pos. + // But while formatting the AST, the line comment gets moved to + // after the block comment. 
+ for (let i = 0; i < $.len(specs); i++) { + const s = specs![i] + { + let s = $.mustTypeAssert(s, {kind: $.TypeKind.Pointer, elemType: 'ImportSpec'}) + if (s!.Name != null) { + s!.Name!.NamePos = pos![i].Start + } + s!.Path!.ValuePos = pos![i].Start + s!.EndPos = pos![i].End + + // An import spec can have both block comment and a line comment + // to its right. In that case, both of them will have the same pos. + // But while formatting the AST, the line comment gets moved to + // after the block comment. + for (let _i = 0; _i < $.len($.mapGet(importComments, s, null)[0]); _i++) { + const g = $.mapGet(importComments, s, null)[0]![_i] + { + + // An import spec can have both block comment and a line comment + // to its right. In that case, both of them will have the same pos. + // But while formatting the AST, the line comment gets moved to + // after the block comment. + for (let _i = 0; _i < $.len(g.cg!.List); _i++) { + const c = g.cg!.List![_i] + { + + // An import spec can have both block comment and a line comment + // to its right. In that case, both of them will have the same pos. + // But while formatting the AST, the line comment gets moved to + // after the block comment. + if (g.left) { + c!.Slash = pos![i].Start - 1 + } + else { + // An import spec can have both block comment and a line comment + // to its right. In that case, both of them will have the same pos. + // But while formatting the AST, the line comment gets moved to + // after the block comment. + c!.Slash = pos![i].End + } + } + } + } + } + } + } + + slices.SortFunc(comments, (a: CommentGroup | null, b: CommentGroup | null): number => { + return cmp.Compare(a!.Pos(), b!.Pos()) + }) + + return specs +} + diff --git a/compliance/deps/go/ast/index.ts b/compliance/deps/go/ast/index.ts new file mode 100644 index 00000000..afc20a5b --- /dev/null +++ b/compliance/deps/go/ast/index.ts @@ -0,0 +1,17 @@ +export { IsExported, IsGenerated, NewIdent, RECV, SEND, Unparen } from "./ast.gs.js" +export { ArrayType, AssignStmt, BadDecl, BadExpr, BadStmt, BasicLit, BinaryExpr, BlockStmt, BranchStmt, CallExpr, CaseClause, ChanType, CommClause, Comment, CommentGroup, CompositeLit, DeclStmt, DeferStmt, Ellipsis, EmptyStmt, ExprStmt, Field, FieldList, File, ForStmt, FuncDecl, FuncLit, FuncType, GenDecl, GoStmt, Ident, IfStmt, ImportSpec, IncDecStmt, IndexExpr, IndexListExpr, InterfaceType, KeyValueExpr, LabeledStmt, MapType, Package, ParenExpr, RangeStmt, ReturnStmt, SelectStmt, SelectorExpr, SendStmt, SliceExpr, StarExpr, StructType, SwitchStmt, TypeAssertExpr, TypeSpec, TypeSwitchStmt, UnaryExpr, ValueSpec } from "./ast.gs.js" +export type { ChanDir, Decl, Expr, Node, Spec, Stmt } from "./ast.gs.js" +export { CommentMap_Comments, CommentMap_Filter, CommentMap_String, CommentMap_Update, NewCommentMap } from "./commentmap.gs.js" +export type { CommentMap } from "./commentmap.gs.js" +export { FileExports, FilterDecl, FilterFile, FilterFuncDuplicates, FilterImportDuplicates, FilterPackage, FilterUnassociatedComments, MergePackageFiles, PackageExports } from "./filter.gs.js" +export type { Filter, MergeMode } from "./filter.gs.js" +export { SortImports } from "./import.gs.js" +export { Fprint, NotNilFilter, Print } from "./print.gs.js" +export type { FieldFilter } from "./print.gs.js" +export { NewPackage } from "./resolve.gs.js" +export type { Importer } from "./resolve.gs.js" +export { Bad, Con, Fun, Lbl, NewObj, NewScope, ObjKind_String, Pkg, Typ, Var } from "./scope.gs.js" +export { Object, Scope } from "./scope.gs.js" +export type { 
ObjKind } from "./scope.gs.js" +export { Inspect, Preorder, Walk } from "./walk.gs.js" +export type { Visitor } from "./walk.gs.js" diff --git a/compliance/deps/go/ast/print.gs.ts b/compliance/deps/go/ast/print.gs.ts new file mode 100644 index 00000000..fe26eea6 --- /dev/null +++ b/compliance/deps/go/ast/print.gs.ts @@ -0,0 +1,392 @@ +import * as $ from "@goscript/builtin/index.js" +import { IsExported } from "./ast.gs.js"; + +import * as fmt from "@goscript/fmt/index.js" + +import * as token from "@goscript/go/token/index.js" + +import * as io from "@goscript/io/index.js" + +import * as os from "@goscript/os/index.js" + +import * as reflect from "@goscript/reflect/index.js" + +export type FieldFilter = ((name: string, value: reflect.Value) => boolean) | null; + +export class localError { + public get err(): $.GoError { + return this._fields.err.value + } + public set err(value: $.GoError) { + this._fields.err.value = value + } + + public _fields: { + err: $.VarRef<$.GoError>; + } + + constructor(init?: Partial<{err?: $.GoError}>) { + this._fields = { + err: $.varRef(init?.err ?? null) + } + } + + public clone(): localError { + const cloned = new localError() + cloned._fields = { + err: $.varRef(this._fields.err.value) + } + return cloned + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'localError', + new localError(), + [], + localError, + {"err": { kind: $.TypeKind.Interface, name: 'GoError', methods: [{ name: 'Error', args: [], returns: [{ type: { kind: $.TypeKind.Basic, name: 'string' } }] }] }} + ); +} + +export class printer { + public get output(): io.Writer { + return this._fields.output.value + } + public set output(value: io.Writer) { + this._fields.output.value = value + } + + public get fset(): token.FileSet | null { + return this._fields.fset.value + } + public set fset(value: token.FileSet | null) { + this._fields.fset.value = value + } + + public get filter(): FieldFilter | null { + return this._fields.filter.value + } + public set filter(value: FieldFilter | null) { + this._fields.filter.value = value + } + + // *T -> line number + public get ptrmap(): Map | null { + return this._fields.ptrmap.value + } + public set ptrmap(value: Map | null) { + this._fields.ptrmap.value = value + } + + // current indentation level + public get indent(): number { + return this._fields.indent.value + } + public set indent(value: number) { + this._fields.indent.value = value + } + + // the last byte processed by Write + public get last(): number { + return this._fields.last.value + } + public set last(value: number) { + this._fields.last.value = value + } + + // current line number + public get line(): number { + return this._fields.line.value + } + public set line(value: number) { + this._fields.line.value = value + } + + public _fields: { + output: $.VarRef; + fset: $.VarRef; + filter: $.VarRef; + ptrmap: $.VarRef | null>; + indent: $.VarRef; + last: $.VarRef; + line: $.VarRef; + } + + constructor(init?: Partial<{filter?: FieldFilter | null, fset?: token.FileSet | null, indent?: number, last?: number, line?: number, output?: io.Writer, ptrmap?: Map | null}>) { + this._fields = { + output: $.varRef(init?.output ?? null), + fset: $.varRef(init?.fset ?? null), + filter: $.varRef(init?.filter ?? new FieldFilter | null(null)), + ptrmap: $.varRef(init?.ptrmap ?? null), + indent: $.varRef(init?.indent ?? 0), + last: $.varRef(init?.last ?? 0), + line: $.varRef(init?.line ?? 
0) + } + } + + public clone(): printer { + const cloned = new printer() + cloned._fields = { + output: $.varRef(this._fields.output.value), + fset: $.varRef(this._fields.fset.value ? $.markAsStructValue(this._fields.fset.value.clone()) : null), + filter: $.varRef(this._fields.filter.value), + ptrmap: $.varRef(this._fields.ptrmap.value), + indent: $.varRef(this._fields.indent.value), + last: $.varRef(this._fields.last.value), + line: $.varRef(this._fields.line.value) + } + return cloned + } + + public Write(data: $.Bytes): [number, $.GoError] { + const p = this + let n: number = 0 + let err: $.GoError = null + let m: number = 0 + for (let i = 0; i < $.len(data); i++) { + const b = data![i] + { + // invariant: data[0:n] has been written + if (b == 10) { + ;[m, err] = p.output!.Write($.goSlice(data, n, i + 1)) + n += m + if (err != null) { + return [n, err] + } + p.line++ + } + else if (p.last == 10) { + ;[, err] = fmt.Fprintf(p.output, "%6d ", p.line) + if (err != null) { + return [n, err] + } + for (let j = p.indent; j > 0; j--) { + ;[, err] = p.output!.Write(indent) + if (err != null) { + return [n, err] + } + } + } + p.last = b + } + } + if ($.len(data) > n) { + ;[m, err] = p.output!.Write($.goSlice(data, n, undefined)) + n += m + } + return [n, err] + } + + // printf is a convenience wrapper that takes care of print errors. + public printf(format: string, ...args: any[]): void { + const p = this + { + let [, err] = fmt.Fprintf(p, format, ...(args ?? [])) + if (err != null) { + $.panic($.markAsStructValue(new localError({}))) + } + } + } + + public print(x: reflect.Value): void { + const p = this + if (!NotNilFilter("", x)) { + p.printf("nil") + return + } + switch (x.Kind()) { + case reflect.Interface: + p.print(x.Elem()) + break + case reflect.Map: + p.printf("%s (len = %d) {", x.Type(), x.Len()) + if (x.Len() > 0) { + p.indent++ + p.printf("\n") + for (let _i = 0; _i < $.len(x.MapKeys()); _i++) { + const key = x.MapKeys()![_i] + { + p.print(key) + p.printf(": ") + p.print(x.MapIndex(key)) + p.printf("\n") + } + } + p.indent-- + } + p.printf("}") + break + case reflect.Pointer: + p.printf("*") + let ptr = await x.Interface() + { + let [line, exists] = $.mapGet(p.ptrmap, ptr, 0) + if (exists) { + p.printf("(obj @ %d)", line) + } + else { + $.mapSet(p.ptrmap, ptr, p.line) + p.print(x.Elem()) + } + } + break + case reflect.Array: + p.printf("%s {", x.Type()) + if (x.Len() > 0) { + p.indent++ + p.printf("\n") + for (let i = 0, n = x.Len(); i < n; i++) { + p.printf("%d: ", i) + p.print(x.Index(i)) + p.printf("\n") + } + p.indent-- + } + p.printf("}") + break + case reflect.Slice: + { + let { value: s, ok: ok } = $.typeAssert<$.Bytes>(await x.Interface(), {kind: $.TypeKind.Slice, elemType: {kind: $.TypeKind.Basic, name: 'number'}}) + if (ok) { + p.printf("%#q", s) + return + } + } + p.printf("%s (len = %d) {", x.Type(), x.Len()) + if (x.Len() > 0) { + p.indent++ + p.printf("\n") + for (let i = 0, n = x.Len(); i < n; i++) { + p.printf("%d: ", i) + p.print(x.Index(i)) + p.printf("\n") + } + p.indent-- + } + p.printf("}") + break + case reflect.Struct: + let t = x.Type() + p.printf("%s {", t) + p.indent++ + let first = true + for (let i = 0, n = t!.NumField(); i < n; i++) { + // exclude non-exported fields because their + // values cannot be accessed via reflection + { + let name = (t!.Field(i))!.Name + if (IsExported(name)) { + let value = $.markAsStructValue(x.Field(i).clone()) + if (p.filter == null || p.filter!(name, value)) { + if (first) { + p.printf("\n") + first = false + } + 
p.printf("%s: ", name) + p.print(value) + p.printf("\n") + } + } + } + } + p.indent-- + p.printf("}") + break + default: + let v = await x.Interface() + $.typeSwitch(v, [{ types: [{kind: $.TypeKind.Basic, name: 'string'}], body: (v) => { + p.printf("%q", v) + return + }}, + { types: ['token.Pos'], body: (v) => { + if (p.fset != null) { + p.printf("%s", await p.fset!.Position(v)) + return + } + }}]) + p.printf("%v", v) + break + } + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'printer', + new printer(), + [{ name: "Write", args: [{ name: "data", type: { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Basic, name: "number" } } }], returns: [{ type: { kind: $.TypeKind.Basic, name: "number" } }, { type: { kind: $.TypeKind.Interface, name: 'GoError', methods: [{ name: 'Error', args: [], returns: [{ type: { kind: $.TypeKind.Basic, name: 'string' } }] }] } }] }, { name: "printf", args: [{ name: "format", type: { kind: $.TypeKind.Basic, name: "string" } }, { name: "args", type: { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Interface, methods: [] } } }], returns: [] }, { name: "print", args: [{ name: "x", type: "Value" }], returns: [] }], + printer, + {"output": "Writer", "fset": { kind: $.TypeKind.Pointer, elemType: "FileSet" }, "filter": "FieldFilter", "ptrmap": { kind: $.TypeKind.Map, keyType: { kind: $.TypeKind.Interface, methods: [] }, elemType: { kind: $.TypeKind.Basic, name: "number" } }, "indent": { kind: $.TypeKind.Basic, name: "number" }, "last": { kind: $.TypeKind.Basic, name: "number" }, "line": { kind: $.TypeKind.Basic, name: "number" }} + ); +} + +let indent: $.Bytes = $.stringToBytes(". ") + +// NotNilFilter is a [FieldFilter] that returns true for field values +// that are not nil; it returns false otherwise. +export function NotNilFilter(_: string, v: reflect.Value): boolean { + switch (v.Kind()) { + case reflect.Chan: + case reflect.Func: + case reflect.Interface: + case reflect.Map: + case reflect.Pointer: + case reflect.Slice: + return !v.IsNil() + break + } + return true +} + +// Fprint prints the (sub-)tree starting at AST node x to w. +// If fset != nil, position information is interpreted relative +// to that file set. Otherwise positions are printed as integer +// values (file set specific offsets). +// +// A non-nil [FieldFilter] f may be provided to control the output: +// struct fields for which f(fieldname, fieldvalue) is true are +// printed; all others are filtered from the output. Unexported +// struct fields are never printed. +export function Fprint(w: io.Writer, fset: token.FileSet | null, x: null | any, f: FieldFilter | null): $.GoError { + return fprint(w, fset, x, f) +} + +export function fprint(w: io.Writer, fset: token.FileSet | null, x: null | any, f: FieldFilter | null): $.GoError { + let err: $.GoError = null + { + using __defer = new $.DisposableStack(); + // setup printer + + // force printing of line number on first line + let p = $.markAsStructValue(new printer({filter: f, fset: fset, last: 10, output: w, ptrmap: $.makeMap()})) + + // install error handler + + // re-panics if it's not a localError + __defer.defer(() => { + { + let e = $.recover() + if (e != null) { + err = $.mustTypeAssert(e, 'localError').err // re-panics if it's not a localError + } + } + }); + + // print x + if (x == null) { + p.printf("nil\n") + return err + } + p.print(reflect.ValueOf(x)) + p.printf("\n") + + return err + } +} + +// Print prints x to standard output, skipping nil fields. 
+// Print(fset, x) is the same as Fprint(os.Stdout, fset, x, NotNilFilter). +export function Print(fset: token.FileSet | null, x: null | any): $.GoError { + return Fprint(os.Stdout, fset, x, NotNilFilter) +} + diff --git a/compliance/deps/go/ast/resolve.gs.ts b/compliance/deps/go/ast/resolve.gs.ts new file mode 100644 index 00000000..ca265f3e --- /dev/null +++ b/compliance/deps/go/ast/resolve.gs.ts @@ -0,0 +1,298 @@ +import * as $ from "@goscript/builtin/index.js" +import { NewObj, NewScope } from "./scope.gs.js"; +import { File, Ident, Package } from "./ast.gs.js"; +import { Object, Scope } from "./scope.gs.js"; + +import * as fmt from "@goscript/fmt/index.js" + +import * as scanner from "@goscript/go/scanner/index.js" + +import * as token from "@goscript/go/token/index.js" + +import * as strconv from "@goscript/strconv/index.js" + +export type Importer = ((imports: Map | null, path: string) => [Object | null, $.GoError]) | null; + +export class pkgBuilder { + public get fset(): token.FileSet | null { + return this._fields.fset.value + } + public set fset(value: token.FileSet | null) { + this._fields.fset.value = value + } + + public get errors(): scanner.ErrorList { + return this._fields.errors.value + } + public set errors(value: scanner.ErrorList) { + this._fields.errors.value = value + } + + public _fields: { + fset: $.VarRef; + errors: $.VarRef; + } + + constructor(init?: Partial<{errors?: scanner.ErrorList, fset?: token.FileSet | null}>) { + this._fields = { + fset: $.varRef(init?.fset ?? null), + errors: $.varRef(init?.errors ?? null as scanner.ErrorList) + } + } + + public clone(): pkgBuilder { + const cloned = new pkgBuilder() + cloned._fields = { + fset: $.varRef(this._fields.fset.value ? $.markAsStructValue(this._fields.fset.value.clone()) : null), + errors: $.varRef(this._fields.errors.value) + } + return cloned + } + + public error(pos: token.Pos, msg: string): void { + const p = this + scanner.ErrorList_Add(p._fields.errors, await p.fset!.Position(pos), msg) + } + + public errorf(pos: token.Pos, format: string, ...args: any[]): void { + const p = this + p.error(pos, fmt.Sprintf(format, ...(args ?? 
[]))) + } + + public _declare(scope: Scope | null, altScope: Scope | null, obj: Object | null): void { + const p = this + let alt = scope!.Insert(obj) + if (alt == null && altScope != null) { + // see if there is a conflicting declaration in altScope + alt = altScope!.Lookup(obj!.Name) + } + if (alt != null) { + let prevDecl = "" + { + let pos = alt!.Pos() + if (token.Pos_IsValid(pos)) { + prevDecl = fmt.Sprintf("\n\tprevious declaration at %s", await p.fset!.Position(pos)) + } + } + p.error(obj!.Pos(), fmt.Sprintf("%s redeclared in this block%s", obj!.Name, prevDecl)) + } + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'pkgBuilder', + new pkgBuilder(), + [{ name: "error", args: [{ name: "pos", type: "Pos" }, { name: "msg", type: { kind: $.TypeKind.Basic, name: "string" } }], returns: [] }, { name: "errorf", args: [{ name: "pos", type: "Pos" }, { name: "format", type: { kind: $.TypeKind.Basic, name: "string" } }, { name: "args", type: { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Interface, methods: [] } } }], returns: [] }, { name: "declare", args: [{ name: "scope", type: { kind: $.TypeKind.Pointer, elemType: "Scope" } }, { name: "altScope", type: { kind: $.TypeKind.Pointer, elemType: "Scope" } }, { name: "obj", type: { kind: $.TypeKind.Pointer, elemType: "Object" } }], returns: [] }], + pkgBuilder, + {"fset": { kind: $.TypeKind.Pointer, elemType: "FileSet" }, "errors": "ErrorList"} + ); +} + +export function resolve(scope: Scope | null, ident: Ident | null): boolean { + for (; scope != null; scope = scope!.Outer) { + { + let obj = scope!.Lookup(ident!.Name) + if (obj != null) { + ident!.Obj = obj + return true + } + } + } + return false +} + +// NewPackage creates a new [Package] node from a set of [File] nodes. It resolves +// unresolved identifiers across files and updates each file's Unresolved list +// accordingly. If a non-nil importer and universe scope are provided, they are +// used to resolve identifiers not declared in any of the package files. Any +// remaining unresolved identifiers are reported as undeclared. If the files +// belong to different packages, one package name is selected and files with +// different package names are reported and then ignored. +// The result is a package node and a [scanner.ErrorList] if there were errors. +// +// Deprecated: use the type checker [go/types] instead; see [Object]. +export function NewPackage(fset: token.FileSet | null, files: Map | null, importer: Importer | null, universe: Scope | null): [Package | null, $.GoError] { + let p: pkgBuilder = new pkgBuilder({}) + p.fset = fset + + // complete package scope + let pkgName = "" + let pkgScope = NewScope(universe) + + // package names must match + + // ignore this file + + // collect top-level file objects in package scope + for (const [_k, file] of files?.entries() ?? []) { + { + // package names must match + + // ignore this file + {let name = file!.Name!.Name + switch (true) { + case pkgName == "": + pkgName = name + break + case name != pkgName: + p.errorf(file!.Package, "package %s; expected %s", name, pkgName) + continue + break + } + } + // collect top-level file objects in package scope + for (const [_k, obj] of file!.Scope!.Objects?.entries() ?? 
[]) { + { + p._declare(pkgScope, null, obj) + } + } + } + } + + // package global mapping of imported package ids to package objects + let imports = $.makeMap() + + // complete file scopes with imports and resolve identifiers + + // ignore file if it belongs to a different package + // (error has already been reported) + + // build file scope by processing all imports + + // TODO(gri) If a local package name != "." is provided, + // global identifier resolution could proceed even if the + // import failed. Consider adjusting the logic here a bit. + + // local name overrides imported package name + + // add import to file scope + + // merge imported scope with file scope + + // declare imported package object in file scope + // (do not re-use pkg in the file scope but create + // a new object instead; the Decl field is different + // for different files) + + // resolve identifiers + + // don't use the universe scope without correct imports + // (objects in the universe may be shadowed by imports; + // with missing imports, identifiers might get resolved + // incorrectly to universe objects) + + // reset universe scope + for (const [_k, file] of files?.entries() ?? []) { + { + // ignore file if it belongs to a different package + // (error has already been reported) + if (file!.Name!.Name != pkgName) { + continue + } + + // build file scope by processing all imports + let importErrors = false + let fileScope = NewScope(pkgScope) + + // TODO(gri) If a local package name != "." is provided, + // global identifier resolution could proceed even if the + // import failed. Consider adjusting the logic here a bit. + + // local name overrides imported package name + + // add import to file scope + + // merge imported scope with file scope + + // declare imported package object in file scope + // (do not re-use pkg in the file scope but create + // a new object instead; the Decl field is different + // for different files) + for (let _i = 0; _i < $.len(file!.Imports); _i++) { + const spec = file!.Imports![_i] + { + if (importer == null) { + importErrors = true + continue + } + let [path, ] = strconv.Unquote(spec!.Path!.Value) + let [pkg, err] = importer!(imports, path) + + // TODO(gri) If a local package name != "." is provided, + // global identifier resolution could proceed even if the + // import failed. Consider adjusting the logic here a bit. + if (err != null) { + p.errorf(spec!.Path!.Pos(), "could not import %s (%s)", path, err) + importErrors = true + continue + } + + // local name overrides imported package name + let name = pkg!.Name + if (spec!.Name != null) { + name = spec!.Name!.Name + } + + // add import to file scope + + // merge imported scope with file scope + + // declare imported package object in file scope + // (do not re-use pkg in the file scope but create + // a new object instead; the Decl field is different + // for different files) + if (name == ".") { + // merge imported scope with file scope + for (const [_k, obj] of $.mustTypeAssert(pkg!.Data, {kind: $.TypeKind.Pointer, elemType: 'Scope'})!.Objects?.entries() ?? 
[]) { + { + p._declare(fileScope, pkgScope, obj) + } + } + } + else if (name != "_") { + // declare imported package object in file scope + // (do not re-use pkg in the file scope but create + // a new object instead; the Decl field is different + // for different files) + let obj = NewObj(1, name) + obj!.Decl = spec + obj!.Data = pkg!.Data + p._declare(fileScope, pkgScope, obj) + } + } + } + + // resolve identifiers + + // don't use the universe scope without correct imports + // (objects in the universe may be shadowed by imports; + // with missing imports, identifiers might get resolved + // incorrectly to universe objects) + if (importErrors) { + // don't use the universe scope without correct imports + // (objects in the universe may be shadowed by imports; + // with missing imports, identifiers might get resolved + // incorrectly to universe objects) + pkgScope!.Outer = null + } + let i = 0 + for (let _i = 0; _i < $.len(file!.Unresolved); _i++) { + const ident = file!.Unresolved![_i] + { + if (!resolve(fileScope, ident)) { + p.errorf(ident!.Pos(), "undeclared name: %s", ident!.Name) + file!.Unresolved![i] = ident + i++ + } + + } + } + file!.Unresolved = $.goSlice(file!.Unresolved, 0, i) + pkgScope!.Outer = universe // reset universe scope + } + } + + scanner.ErrorList_Sort(p.errors) + return [new Package({}), scanner.ErrorList_Err(p.errors)] +} + diff --git a/compliance/deps/go/ast/scope.gs.ts b/compliance/deps/go/ast/scope.gs.ts new file mode 100644 index 00000000..91bbb6da --- /dev/null +++ b/compliance/deps/go/ast/scope.gs.ts @@ -0,0 +1,282 @@ +import * as $ from "@goscript/builtin/index.js" +import { AssignStmt, Field, FuncDecl, Ident, ImportSpec, LabeledStmt, TypeSpec, ValueSpec } from "./ast.gs.js"; + +import * as fmt from "@goscript/fmt/index.js" + +import * as token from "@goscript/go/token/index.js" + +import * as strings from "@goscript/strings/index.js" + +// for error handling +export let Bad: ObjKind = 0 + +// package +export let Pkg: ObjKind = 0 + +// constant +export let Con: ObjKind = 0 + +// type +export let Typ: ObjKind = 0 + +// variable +export let Var: ObjKind = 0 + +// function or method +export let Fun: ObjKind = 0 + +// label +export let Lbl: ObjKind = 0 + +export type ObjKind = number; + +export function ObjKind_String(kind: ObjKind): string { + return objKindStrings![kind] +} + + +export class Scope { + public get Outer(): Scope | null { + return this._fields.Outer.value + } + public set Outer(value: Scope | null) { + this._fields.Outer.value = value + } + + public get Objects(): Map | null { + return this._fields.Objects.value + } + public set Objects(value: Map | null) { + this._fields.Objects.value = value + } + + public _fields: { + Outer: $.VarRef; + Objects: $.VarRef | null>; + } + + constructor(init?: Partial<{Objects?: Map | null, Outer?: Scope | null}>) { + this._fields = { + Outer: $.varRef(init?.Outer ?? null), + Objects: $.varRef(init?.Objects ?? null) + } + } + + public clone(): Scope { + const cloned = new Scope() + cloned._fields = { + Outer: $.varRef(this._fields.Outer.value ? $.markAsStructValue(this._fields.Outer.value.clone()) : null), + Objects: $.varRef(this._fields.Objects.value) + } + return cloned + } + + // Lookup returns the object with the given name if it is + // found in scope s, otherwise it returns nil. Outer scopes + // are ignored. + public Lookup(name: string): Object | null { + const s = this + return $.mapGet(s.Objects, name, null)[0] + } + + // Insert attempts to insert a named object obj into the scope s. 
+ // If the scope already contains an object alt with the same name, + // Insert leaves the scope unchanged and returns alt. Otherwise + // it inserts obj and returns nil. + public Insert(obj: Object | null): Object | null { + const s = this + let alt: Object | null = null + { + alt = $.mapGet(s.Objects, obj!.Name, null)[0] + if (alt == null) { + $.mapSet(s.Objects, obj!.Name, obj) + } + } + return alt + } + + // Debugging support + public String(): string { + const s = this + let buf: strings.Builder = new strings.Builder() + fmt.Fprintf(buf, "scope %p {", s) + if (s != null && $.len(s.Objects) > 0) { + fmt.Fprintln(buf) + for (const [_k, obj] of s.Objects?.entries() ?? []) { + { + fmt.Fprintf(buf, "\t%s %s\n", obj!.Kind, obj!.Name) + } + } + } + fmt.Fprintf(buf, "}\n") + return buf.String() + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'Scope', + new Scope(), + [{ name: "Lookup", args: [{ name: "name", type: { kind: $.TypeKind.Basic, name: "string" } }], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "Object" } }] }, { name: "Insert", args: [{ name: "obj", type: { kind: $.TypeKind.Pointer, elemType: "Object" } }], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "Object" } }] }, { name: "String", args: [], returns: [{ type: { kind: $.TypeKind.Basic, name: "string" } }] }], + Scope, + {"Outer": { kind: $.TypeKind.Pointer, elemType: "Scope" }, "Objects": { kind: $.TypeKind.Map, keyType: { kind: $.TypeKind.Basic, name: "string" }, elemType: { kind: $.TypeKind.Pointer, elemType: "Object" } }} + ); +} + +export class Object { + public get Kind(): ObjKind { + return this._fields.Kind.value + } + public set Kind(value: ObjKind) { + this._fields.Kind.value = value + } + + // declared name + public get Name(): string { + return this._fields.Name.value + } + public set Name(value: string) { + this._fields.Name.value = value + } + + // corresponding Field, XxxSpec, FuncDecl, LabeledStmt, AssignStmt, Scope; or nil + public get Decl(): null | any { + return this._fields.Decl.value + } + public set Decl(value: null | any) { + this._fields.Decl.value = value + } + + // object-specific data; or nil + public get Data(): null | any { + return this._fields.Data.value + } + public set Data(value: null | any) { + this._fields.Data.value = value + } + + // placeholder for type information; may be nil + public get Type(): null | any { + return this._fields.Type.value + } + public set Type(value: null | any) { + this._fields.Type.value = value + } + + public _fields: { + Kind: $.VarRef; + Name: $.VarRef; + Decl: $.VarRef; + Data: $.VarRef; + Type: $.VarRef; + } + + constructor(init?: Partial<{Data?: null | any, Decl?: null | any, Kind?: ObjKind, Name?: string, Type?: null | any}>) { + this._fields = { + Kind: $.varRef(init?.Kind ?? 0 as ObjKind), + Name: $.varRef(init?.Name ?? ""), + Decl: $.varRef(init?.Decl ?? null), + Data: $.varRef(init?.Data ?? null), + Type: $.varRef(init?.Type ?? null) + } + } + + public clone(): Object { + const cloned = new Object() + cloned._fields = { + Kind: $.varRef(this._fields.Kind.value), + Name: $.varRef(this._fields.Name.value), + Decl: $.varRef(this._fields.Decl.value), + Data: $.varRef(this._fields.Data.value), + Type: $.varRef(this._fields.Type.value) + } + return cloned + } + + // Pos computes the source position of the declaration of an object name. + // The result may be an invalid position if it cannot be computed + // (obj.Decl may be nil or not correct). 
+ public Pos(): token.Pos { + const obj = this + let name = obj.Name + $.typeSwitch(obj.Decl, [{ types: [{kind: $.TypeKind.Pointer, elemType: 'Field'}], body: (d) => { + for (let _i = 0; _i < $.len(d!.Names); _i++) { + const n = d!.Names![_i] + { + if (n!.Name == name) { + return n!.Pos() + } + } + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ImportSpec'}], body: (d) => { + if (d!.Name != null && d!.Name!.Name == name) { + return d!.Name!.Pos() + } + return d!.Path!.Pos() + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ValueSpec'}], body: (d) => { + for (let _i = 0; _i < $.len(d!.Names); _i++) { + const n = d!.Names![_i] + { + if (n!.Name == name) { + return n!.Pos() + } + } + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'TypeSpec'}], body: (d) => { + if (d!.Name!.Name == name) { + return d!.Name!.Pos() + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'FuncDecl'}], body: (d) => { + if (d!.Name!.Name == name) { + return d!.Name!.Pos() + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'LabeledStmt'}], body: (d) => { + if (d!.Label!.Name == name) { + return d!.Label!.Pos() + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'AssignStmt'}], body: (d) => { + for (let _i = 0; _i < $.len(d!.Lhs); _i++) { + const x = d!.Lhs![_i] + { + { + let { value: ident, ok: isIdent } = $.typeAssert(x, {kind: $.TypeKind.Pointer, elemType: 'Ident'}) + if (isIdent && ident!.Name == name) { + return ident!.Pos() + } + } + } + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'Scope'}], body: (d) => {}}]) + return token.NoPos + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'Object', + new Object(), + [{ name: "Pos", args: [], returns: [{ type: "Pos" }] }], + Object, + {"Kind": "ObjKind", "Name": { kind: $.TypeKind.Basic, name: "string" }, "Decl": { kind: $.TypeKind.Interface, methods: [] }, "Data": { kind: $.TypeKind.Interface, methods: [] }, "Type": { kind: $.TypeKind.Interface, methods: [] }} + ); +} + +let objKindStrings = $.arrayToSlice(["bad", "package", "const", "type", "var", "func", "label"]) + +// NewScope creates a new scope nested in the outer scope. +export function NewScope(outer: Scope | null): Scope | null { + // initial scope capacity + let n: number = 4 + return new Scope({}) +} + +// NewObj creates a new object of a given kind and name. 
+export function NewObj(kind: ObjKind, name: string): Object | null { + return new Object({Kind: kind, Name: name}) +} + diff --git a/compliance/deps/go/ast/walk.gs.ts b/compliance/deps/go/ast/walk.gs.ts new file mode 100644 index 00000000..8885eb34 --- /dev/null +++ b/compliance/deps/go/ast/walk.gs.ts @@ -0,0 +1,408 @@ +import * as $ from "@goscript/builtin/index.js" +import { ArrayType, AssignStmt, BadDecl, BadExpr, BadStmt, BasicLit, BinaryExpr, BlockStmt, BranchStmt, CallExpr, CaseClause, ChanType, CommClause, Comment, CommentGroup, CompositeLit, DeclStmt, DeferStmt, Ellipsis, EmptyStmt, ExprStmt, Field, FieldList, File, ForStmt, FuncDecl, FuncLit, FuncType, GenDecl, GoStmt, Ident, IfStmt, ImportSpec, IncDecStmt, IndexExpr, IndexListExpr, InterfaceType, KeyValueExpr, LabeledStmt, MapType, Node, Package, ParenExpr, RangeStmt, ReturnStmt, SelectStmt, SelectorExpr, SendStmt, SliceExpr, StarExpr, StructType, SwitchStmt, TypeAssertExpr, TypeSpec, TypeSwitchStmt, UnaryExpr, ValueSpec } from "./ast.gs.js"; + +import * as fmt from "@goscript/fmt/index.js" + +import * as iter from "@goscript/iter/index.js" + +export type Visitor = null | { + Visit(node: Node): Visitor +} + +$.registerInterfaceType( + 'Visitor', + null, // Zero value for interface is null + [{ name: "Visit", args: [{ name: "node", type: "Node" }], returns: [{ type: "Visitor" }] }] +); + +export type inspector = ((p0: Node) => boolean) | null; + +export function inspector_Visit(f: inspector, node: Node): Visitor { + if (f!(node)) { + return f + } + return null +} + + +export function walkList(v: Visitor, list: $.Slice): void { + for (let _i = 0; _i < $.len(list); _i++) { + const node = list![_i] + { + Walk(v, node) + } + } +} + +// Walk traverses an AST in depth-first order: It starts by calling +// v.Visit(node); node must not be nil. If the visitor w returned by +// v.Visit(node) is not nil, Walk is invoked recursively with visitor +// w for each of the non-nil children of node, followed by a call of +// w.Visit(nil). 
+export function Walk(v: Visitor, node: Node): void { + { + v = v!.Visit(node) + if (v == null) { + return + } + } + + // walk children + // (the order of the cases matches the order + // of the corresponding node types in ast.go) + + // Comments and fields + + // nothing to do + + // Expressions + + // nothing to do + + // Types + + // Statements + + // nothing to do + + // nothing to do + + // Declarations + + // nothing to do + + // Files and packages + + // don't walk n.Comments - they have been + // visited already through the individual + // nodes + $.typeSwitch(node, [{ types: [{kind: $.TypeKind.Pointer, elemType: 'Comment'}], body: (n) => {}}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'CommentGroup'}], body: (n) => { + walkList(v, n!.List) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'Field'}], body: (n) => { + if (n!.Doc != null) { + Walk(v, n!.Doc) + } + walkList(v, n!.Names) + if (n!.Type != null) { + Walk(v, n!.Type) + } + if (n!.Tag != null) { + Walk(v, n!.Tag) + } + if (n!.Comment != null) { + Walk(v, n!.Comment) + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'FieldList'}], body: (n) => { + walkList(v, n!.List) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'BadExpr'}, {kind: $.TypeKind.Pointer, elemType: 'Ident'}, {kind: $.TypeKind.Pointer, elemType: 'BasicLit'}], body: (n) => {}}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'Ellipsis'}], body: (n) => { + if (n!.Elt != null) { + Walk(v, n!.Elt) + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'FuncLit'}], body: (n) => { + Walk(v, n!.Type) + Walk(v, n!.Body) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'CompositeLit'}], body: (n) => { + if (n!.Type != null) { + Walk(v, n!.Type) + } + walkList(v, n!.Elts) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ParenExpr'}], body: (n) => { + Walk(v, n!.X) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'SelectorExpr'}], body: (n) => { + Walk(v, n!.X) + Walk(v, n!.Sel) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'IndexExpr'}], body: (n) => { + Walk(v, n!.X) + Walk(v, n!.Index) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'IndexListExpr'}], body: (n) => { + Walk(v, n!.X) + walkList(v, n!.Indices) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'SliceExpr'}], body: (n) => { + Walk(v, n!.X) + if (n!.Low != null) { + Walk(v, n!.Low) + } + if (n!.High != null) { + Walk(v, n!.High) + } + if (n!.Max != null) { + Walk(v, n!.Max) + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'TypeAssertExpr'}], body: (n) => { + Walk(v, n!.X) + if (n!.Type != null) { + Walk(v, n!.Type) + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'CallExpr'}], body: (n) => { + Walk(v, n!.Fun) + walkList(v, n!.Args) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'StarExpr'}], body: (n) => { + Walk(v, n!.X) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'UnaryExpr'}], body: (n) => { + Walk(v, n!.X) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'BinaryExpr'}], body: (n) => { + Walk(v, n!.X) + Walk(v, n!.Y) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'KeyValueExpr'}], body: (n) => { + Walk(v, n!.Key) + Walk(v, n!.Value) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ArrayType'}], body: (n) => { + if (n!.Len != null) { + Walk(v, n!.Len) + } + Walk(v, n!.Elt) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'StructType'}], body: (n) => { + Walk(v, n!.Fields) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'FuncType'}], body: (n) => { + if 
(n!.TypeParams != null) { + Walk(v, n!.TypeParams) + } + if (n!.Params != null) { + Walk(v, n!.Params) + } + if (n!.Results != null) { + Walk(v, n!.Results) + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'InterfaceType'}], body: (n) => { + Walk(v, n!.Methods) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'MapType'}], body: (n) => { + Walk(v, n!.Key) + Walk(v, n!.Value) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ChanType'}], body: (n) => { + Walk(v, n!.Value) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'BadStmt'}], body: (n) => {}}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'DeclStmt'}], body: (n) => { + Walk(v, n!.Decl) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'EmptyStmt'}], body: (n) => {}}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'LabeledStmt'}], body: (n) => { + Walk(v, n!.Label) + Walk(v, n!.Stmt) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ExprStmt'}], body: (n) => { + Walk(v, n!.X) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'SendStmt'}], body: (n) => { + Walk(v, n!.Chan) + Walk(v, n!.Value) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'IncDecStmt'}], body: (n) => { + Walk(v, n!.X) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'AssignStmt'}], body: (n) => { + walkList(v, n!.Lhs) + walkList(v, n!.Rhs) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'GoStmt'}], body: (n) => { + Walk(v, n!.Call) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'DeferStmt'}], body: (n) => { + Walk(v, n!.Call) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ReturnStmt'}], body: (n) => { + walkList(v, n!.Results) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'BranchStmt'}], body: (n) => { + if (n!.Label != null) { + Walk(v, n!.Label) + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'BlockStmt'}], body: (n) => { + walkList(v, n!.List) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'IfStmt'}], body: (n) => { + if (n!.Init != null) { + Walk(v, n!.Init) + } + Walk(v, n!.Cond) + Walk(v, n!.Body) + if (n!.Else != null) { + Walk(v, n!.Else) + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'CaseClause'}], body: (n) => { + walkList(v, n!.List) + walkList(v, n!.Body) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'SwitchStmt'}], body: (n) => { + if (n!.Init != null) { + Walk(v, n!.Init) + } + if (n!.Tag != null) { + Walk(v, n!.Tag) + } + Walk(v, n!.Body) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'TypeSwitchStmt'}], body: (n) => { + if (n!.Init != null) { + Walk(v, n!.Init) + } + Walk(v, n!.Assign) + Walk(v, n!.Body) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'CommClause'}], body: (n) => { + if (n!.Comm != null) { + Walk(v, n!.Comm) + } + walkList(v, n!.Body) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'SelectStmt'}], body: (n) => { + Walk(v, n!.Body) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ForStmt'}], body: (n) => { + if (n!.Init != null) { + Walk(v, n!.Init) + } + if (n!.Cond != null) { + Walk(v, n!.Cond) + } + if (n!.Post != null) { + Walk(v, n!.Post) + } + Walk(v, n!.Body) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'RangeStmt'}], body: (n) => { + if (n!.Key != null) { + Walk(v, n!.Key) + } + if (n!.Value != null) { + Walk(v, n!.Value) + } + Walk(v, n!.X) + Walk(v, n!.Body) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ImportSpec'}], body: (n) => { + if (n!.Doc != null) { + Walk(v, n!.Doc) + } + if (n!.Name != null) { + Walk(v, n!.Name) + } + Walk(v, n!.Path) + 
if (n!.Comment != null) { + Walk(v, n!.Comment) + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ValueSpec'}], body: (n) => { + if (n!.Doc != null) { + Walk(v, n!.Doc) + } + walkList(v, n!.Names) + if (n!.Type != null) { + Walk(v, n!.Type) + } + walkList(v, n!.Values) + if (n!.Comment != null) { + Walk(v, n!.Comment) + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'TypeSpec'}], body: (n) => { + if (n!.Doc != null) { + Walk(v, n!.Doc) + } + Walk(v, n!.Name) + if (n!.TypeParams != null) { + Walk(v, n!.TypeParams) + } + Walk(v, n!.Type) + if (n!.Comment != null) { + Walk(v, n!.Comment) + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'BadDecl'}], body: (n) => {}}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'GenDecl'}], body: (n) => { + if (n!.Doc != null) { + Walk(v, n!.Doc) + } + walkList(v, n!.Specs) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'FuncDecl'}], body: (n) => { + if (n!.Doc != null) { + Walk(v, n!.Doc) + } + if (n!.Recv != null) { + Walk(v, n!.Recv) + } + Walk(v, n!.Name) + Walk(v, n!.Type) + if (n!.Body != null) { + Walk(v, n!.Body) + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'File'}], body: (n) => { + if (n!.Doc != null) { + Walk(v, n!.Doc) + } + Walk(v, n!.Name) + walkList(v, n!.Decls) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'Package'}], body: (n) => { + for (const [_k, f] of n!.Files?.entries() ?? []) { + { + Walk(v, f) + } + } + }}], () => { + $.panic(fmt.Sprintf("ast.Walk: unexpected node type %T", n)) + }) + + null +} + +// Inspect traverses an AST in depth-first order: It starts by calling +// f(node); node must not be nil. If f returns true, Inspect invokes f +// recursively for each of the non-nil children of node, followed by a +// call of f(nil). +export function Inspect(node: Node, f: ((p0: Node) => boolean) | null): void { + Walk(Object.assign(f, { __goTypeName: 'inspector' }), node) +} + +// Preorder returns an iterator over all the nodes of the syntax tree +// beneath (and including) the specified root, in depth-first +// preorder. +// +// For greater control over the traversal of each subtree, use [Inspect]. +export function Preorder(root: Node): iter.Seq | null { + + // yield must not be called once ok is false. + return (_yield: ((p0: Node) => boolean) | null): void => { + let ok = true + + // yield must not be called once ok is false. + Inspect(root, (n: Node): boolean => { + + // yield must not be called once ok is false. + if (n != null) { + // yield must not be called once ok is false. + ok = ok && _yield!(n) + } + return ok + }) + } +} + diff --git a/compliance/deps/go/build/constraint/expr.gs.ts b/compliance/deps/go/build/constraint/expr.gs.ts new file mode 100644 index 00000000..5b8040a4 --- /dev/null +++ b/compliance/deps/go/build/constraint/expr.gs.ts @@ -0,0 +1,982 @@ +import * as $ from "@goscript/builtin/index.js" + +import * as errors from "@goscript/errors/index.js" + +import * as strings from "@goscript/strings/index.js" + +import * as unicode from "@goscript/unicode/index.js" + +import * as utf8 from "@goscript/unicode/utf8/index.js" + +let maxSize: number = 1000 + +export type Expr = null | { + // Eval reports whether the expression evaluates to true. + // It calls ok(tag) as needed to find out whether a given build tag + // is satisfied by the current build configuration. + Eval(ok: ((tag: string) => boolean) | null): boolean + // String returns the string form of the expression, + // using the boolean syntax used in //go:build lines. 
+ String(): string + // The presence of an isExpr method explicitly marks the type as an Expr. + // Only implementations in this package should be used as Exprs. + isExpr(): void +} + +$.registerInterfaceType( + 'Expr', + null, // Zero value for interface is null + [{ name: "Eval", args: [{ name: "ok", type: { kind: $.TypeKind.Function, params: [{ kind: $.TypeKind.Basic, name: "string" }], results: [{ kind: $.TypeKind.Basic, name: "boolean" }] } }], returns: [{ type: { kind: $.TypeKind.Basic, name: "boolean" } }] }, { name: "String", args: [], returns: [{ type: { kind: $.TypeKind.Basic, name: "string" } }] }, { name: "isExpr", args: [], returns: [] }] +); + +export class SyntaxError { + // byte offset in input where error was detected + public get Offset(): number { + return this._fields.Offset.value + } + public set Offset(value: number) { + this._fields.Offset.value = value + } + + // description of error + public get Err(): string { + return this._fields.Err.value + } + public set Err(value: string) { + this._fields.Err.value = value + } + + public _fields: { + Offset: $.VarRef; + Err: $.VarRef; + } + + constructor(init?: Partial<{Err?: string, Offset?: number}>) { + this._fields = { + Offset: $.varRef(init?.Offset ?? 0), + Err: $.varRef(init?.Err ?? "") + } + } + + public clone(): SyntaxError { + const cloned = new SyntaxError() + cloned._fields = { + Offset: $.varRef(this._fields.Offset.value), + Err: $.varRef(this._fields.Err.value) + } + return cloned + } + + public Error(): string { + const e = this + return e.Err + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'SyntaxError', + new SyntaxError(), + [{ name: "Error", args: [], returns: [{ type: { kind: $.TypeKind.Basic, name: "string" } }] }], + SyntaxError, + {"Offset": { kind: $.TypeKind.Basic, name: "number" }, "Err": { kind: $.TypeKind.Basic, name: "string" }} + ); +} + +export class TagExpr { + // for example, “linux” or “cgo” + public get Tag(): string { + return this._fields.Tag.value + } + public set Tag(value: string) { + this._fields.Tag.value = value + } + + public _fields: { + Tag: $.VarRef; + } + + constructor(init?: Partial<{Tag?: string}>) { + this._fields = { + Tag: $.varRef(init?.Tag ?? 
"") + } + } + + public clone(): TagExpr { + const cloned = new TagExpr() + cloned._fields = { + Tag: $.varRef(this._fields.Tag.value) + } + return cloned + } + + public isExpr(): void { + } + + public Eval(ok: ((tag: string) => boolean) | null): boolean { + const x = this + return ok!(x.Tag) + } + + public String(): string { + const x = this + return x.Tag + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'TagExpr', + new TagExpr(), + [{ name: "isExpr", args: [], returns: [] }, { name: "Eval", args: [{ name: "ok", type: { kind: $.TypeKind.Function, params: [{ kind: $.TypeKind.Basic, name: "string" }], results: [{ kind: $.TypeKind.Basic, name: "boolean" }] } }], returns: [{ type: { kind: $.TypeKind.Basic, name: "boolean" } }] }, { name: "String", args: [], returns: [{ type: { kind: $.TypeKind.Basic, name: "string" } }] }], + TagExpr, + {"Tag": { kind: $.TypeKind.Basic, name: "string" }} + ); +} + +export class exprParser { + // input string + public get s(): string { + return this._fields.s.value + } + public set s(value: string) { + this._fields.s.value = value + } + + // next read location in s + public get i(): number { + return this._fields.i.value + } + public set i(value: number) { + this._fields.i.value = value + } + + // last token read + public get tok(): string { + return this._fields.tok.value + } + public set tok(value: string) { + this._fields.tok.value = value + } + + public get isTag(): boolean { + return this._fields.isTag.value + } + public set isTag(value: boolean) { + this._fields.isTag.value = value + } + + // position (start) of last token + public get pos(): number { + return this._fields.pos.value + } + public set pos(value: number) { + this._fields.pos.value = value + } + + public get size(): number { + return this._fields.size.value + } + public set size(value: number) { + this._fields.size.value = value + } + + public _fields: { + s: $.VarRef; + i: $.VarRef; + tok: $.VarRef; + isTag: $.VarRef; + pos: $.VarRef; + size: $.VarRef; + } + + constructor(init?: Partial<{i?: number, isTag?: boolean, pos?: number, s?: string, size?: number, tok?: string}>) { + this._fields = { + s: $.varRef(init?.s ?? ""), + i: $.varRef(init?.i ?? 0), + tok: $.varRef(init?.tok ?? ""), + isTag: $.varRef(init?.isTag ?? false), + pos: $.varRef(init?.pos ?? 0), + size: $.varRef(init?.size ?? 0) + } + } + + public clone(): exprParser { + const cloned = new exprParser() + cloned._fields = { + s: $.varRef(this._fields.s.value), + i: $.varRef(this._fields.i.value), + tok: $.varRef(this._fields.tok.value), + isTag: $.varRef(this._fields.isTag.value), + pos: $.varRef(this._fields.pos.value), + size: $.varRef(this._fields.size.value) + } + return cloned + } + + // or parses a sequence of || expressions. + // On entry, the next input token has not yet been lexed. + // On exit, the next input token has been lexed and is in p.tok. + public or(): Expr { + const p = this + let x = p.and() + for (; p.tok == "||"; ) { + x = or(x, p.and()) + } + return x + } + + // and parses a sequence of && expressions. + // On entry, the next input token has not yet been lexed. + // On exit, the next input token has been lexed and is in p.tok. + public and(): Expr { + const p = this + let x = p.not() + for (; p.tok == "&&"; ) { + x = and(x, p.not()) + } + return x + } + + // not parses a ! expression. + // On entry, the next input token has not yet been lexed. + // On exit, the next input token has been lexed and is in p.tok. 
+ public not(): Expr { + const p = this + p.size++ + if (p.size > 1000) { + $.panic(new SyntaxError({Err: "build expression too large", Offset: p.pos})) + } + p.lex() + if (p.tok == "!") { + p.lex() + if (p.tok == "!") { + $.panic(new SyntaxError({Err: "double negation not allowed", Offset: p.pos})) + } + return not(p.atom()) + } + return p.atom() + } + + // atom parses a tag or a parenthesized expression. + // On entry, the next input token HAS been lexed. + // On exit, the next input token has been lexed and is in p.tok. + public atom(): Expr { + const p = this + using __defer = new $.DisposableStack(); + if (p.tok == "(") { + using __defer = new $.DisposableStack(); + let pos = p.pos + __defer.defer(() => { + { + let e = $.recover() + if (e != null) { + const _temp_e = e + { + let { value: e, ok: ok } = $.typeAssert(_temp_e, {kind: $.TypeKind.Pointer, elemType: 'SyntaxError'}) + if (ok && e!.Err == "unexpected end of expression") { + e!.Err = "missing close paren" + } + } + $.panic(e) + } + } + }); + let x = p.or() + if (p.tok != ")") { + $.panic(new SyntaxError({Err: "missing close paren", Offset: pos})) + } + p.lex() + return x + } + if (!p.isTag) { + if (p.tok == "") { + $.panic(new SyntaxError({Err: "unexpected end of expression", Offset: p.pos})) + } + $.panic(new SyntaxError({Err: "unexpected token " + p.tok, Offset: p.pos})) + } + let tok = p.tok + p.lex() + return tag(tok) + } + + // lex finds and consumes the next token in the input stream. + // On return, p.tok is set to the token text, + // p.isTag reports whether the token was a tag, + // and p.pos records the byte offset of the start of the token in the input stream. + // If lex reaches the end of the input, p.tok is set to the empty string. + // For any other syntax error, lex panics with a SyntaxError. 
+ public lex(): void { + const p = this + p.isTag = false + for (; p.i < $.len(p.s) && ($.indexString(p.s, p.i) == 32 || $.indexString(p.s, p.i) == 9); ) { + p.i++ + } + if (p.i >= $.len(p.s)) { + p.tok = "" + p.pos = p.i + return + } + switch ($.indexString(p.s, p.i)) { + case 40: + case 41: + case 33: + p.pos = p.i + p.i++ + p.tok = $.sliceString(p.s, p.pos, p.i) + return + break + case 38: + case 124: + if (p.i + 1 >= $.len(p.s) || $.indexString(p.s, p.i + 1) != $.indexString(p.s, p.i)) { + $.panic(new SyntaxError({Err: "invalid syntax at " + $.runeOrStringToString($.indexString(p.s, p.i)), Offset: p.i})) + } + p.pos = p.i + p.i += 2 + p.tok = $.sliceString(p.s, p.pos, p.i) + return + break + } + let tag = $.sliceString(p.s, p.i, undefined) + { + const _runes = $.stringToRunes(tag) + for (let i = 0; i < _runes.length; i++) { + const c = _runes[i] + { + if (!unicode.IsLetter(c) && !unicode.IsDigit(c) && c != 95 && c != 46) { + tag = $.sliceString(tag, undefined, i) + break + } + } + } + } + if (tag == "") { + let [c, ] = utf8.DecodeRuneInString($.sliceString(p.s, p.i, undefined)) + $.panic(new SyntaxError({Err: "invalid syntax at " + $.runeOrStringToString(c), Offset: p.i})) + } + p.pos = p.i + p.i += $.len(tag) + p.tok = $.sliceString(p.s, p.pos, p.i) + p.isTag = true + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'exprParser', + new exprParser(), + [{ name: "or", args: [], returns: [{ type: "Expr" }] }, { name: "and", args: [], returns: [{ type: "Expr" }] }, { name: "not", args: [], returns: [{ type: "Expr" }] }, { name: "atom", args: [], returns: [{ type: "Expr" }] }, { name: "lex", args: [], returns: [] }], + exprParser, + {"s": { kind: $.TypeKind.Basic, name: "string" }, "i": { kind: $.TypeKind.Basic, name: "number" }, "tok": { kind: $.TypeKind.Basic, name: "string" }, "isTag": { kind: $.TypeKind.Basic, name: "boolean" }, "pos": { kind: $.TypeKind.Basic, name: "number" }, "size": { kind: $.TypeKind.Basic, name: "number" }} + ); +} + +export class AndExpr { + public get X(): Expr { + return this._fields.X.value + } + public set X(value: Expr) { + this._fields.X.value = value + } + + public get Y(): Expr { + return this._fields.Y.value + } + public set Y(value: Expr) { + this._fields.Y.value = value + } + + public _fields: { + X: $.VarRef; + Y: $.VarRef; + } + + constructor(init?: Partial<{X?: Expr, Y?: Expr}>) { + this._fields = { + X: $.varRef(init?.X ?? null), + Y: $.varRef(init?.Y ?? 
null) + } + } + + public clone(): AndExpr { + const cloned = new AndExpr() + cloned._fields = { + X: $.varRef(this._fields.X.value), + Y: $.varRef(this._fields.Y.value) + } + return cloned + } + + public isExpr(): void { + } + + public Eval(ok: ((tag: string) => boolean) | null): boolean { + const x = this + let xok = x.X!.Eval(ok) + let yok = x.Y!.Eval(ok) + return xok && yok + } + + public String(): string { + const x = this + return andArg(x.X) + " && " + andArg(x.Y) + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'AndExpr', + new AndExpr(), + [{ name: "isExpr", args: [], returns: [] }, { name: "Eval", args: [{ name: "ok", type: { kind: $.TypeKind.Function, params: [{ kind: $.TypeKind.Basic, name: "string" }], results: [{ kind: $.TypeKind.Basic, name: "boolean" }] } }], returns: [{ type: { kind: $.TypeKind.Basic, name: "boolean" } }] }, { name: "String", args: [], returns: [{ type: { kind: $.TypeKind.Basic, name: "string" } }] }], + AndExpr, + {"X": "Expr", "Y": "Expr"} + ); +} + +export class NotExpr { + public get X(): Expr { + return this._fields.X.value + } + public set X(value: Expr) { + this._fields.X.value = value + } + + public _fields: { + X: $.VarRef; + } + + constructor(init?: Partial<{X?: Expr}>) { + this._fields = { + X: $.varRef(init?.X ?? null) + } + } + + public clone(): NotExpr { + const cloned = new NotExpr() + cloned._fields = { + X: $.varRef(this._fields.X.value) + } + return cloned + } + + public isExpr(): void { + } + + public Eval(ok: ((tag: string) => boolean) | null): boolean { + const x = this + return !x.X!.Eval(ok) + } + + public String(): string { + const x = this + let s = x.X!.String() + $.typeSwitch(x.X, [{ types: [{kind: $.TypeKind.Pointer, elemType: 'AndExpr'}, {kind: $.TypeKind.Pointer, elemType: 'OrExpr'}], body: () => { + s = "(" + s + ")" + }}]) + return "!" + s + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'NotExpr', + new NotExpr(), + [{ name: "isExpr", args: [], returns: [] }, { name: "Eval", args: [{ name: "ok", type: { kind: $.TypeKind.Function, params: [{ kind: $.TypeKind.Basic, name: "string" }], results: [{ kind: $.TypeKind.Basic, name: "boolean" }] } }], returns: [{ type: { kind: $.TypeKind.Basic, name: "boolean" } }] }, { name: "String", args: [], returns: [{ type: { kind: $.TypeKind.Basic, name: "string" } }] }], + NotExpr, + {"X": "Expr"} + ); +} + +export class OrExpr { + public get X(): Expr { + return this._fields.X.value + } + public set X(value: Expr) { + this._fields.X.value = value + } + + public get Y(): Expr { + return this._fields.Y.value + } + public set Y(value: Expr) { + this._fields.Y.value = value + } + + public _fields: { + X: $.VarRef; + Y: $.VarRef; + } + + constructor(init?: Partial<{X?: Expr, Y?: Expr}>) { + this._fields = { + X: $.varRef(init?.X ?? null), + Y: $.varRef(init?.Y ?? 
null) + } + } + + public clone(): OrExpr { + const cloned = new OrExpr() + cloned._fields = { + X: $.varRef(this._fields.X.value), + Y: $.varRef(this._fields.Y.value) + } + return cloned + } + + public isExpr(): void { + } + + public Eval(ok: ((tag: string) => boolean) | null): boolean { + const x = this + let xok = x.X!.Eval(ok) + let yok = x.Y!.Eval(ok) + return xok || yok + } + + public String(): string { + const x = this + return orArg(x.X) + " || " + orArg(x.Y) + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'OrExpr', + new OrExpr(), + [{ name: "isExpr", args: [], returns: [] }, { name: "Eval", args: [{ name: "ok", type: { kind: $.TypeKind.Function, params: [{ kind: $.TypeKind.Basic, name: "string" }], results: [{ kind: $.TypeKind.Basic, name: "boolean" }] } }], returns: [{ type: { kind: $.TypeKind.Basic, name: "boolean" } }] }, { name: "String", args: [], returns: [{ type: { kind: $.TypeKind.Basic, name: "string" } }] }], + OrExpr, + {"X": "Expr", "Y": "Expr"} + ); +} + +let errComplex: $.GoError = errors.New("expression too complex for // +build lines") + +let errNotConstraint: $.GoError = errors.New("not a build constraint") + +export function tag(tag: string): Expr { + return new TagExpr({}) +} + +export function not(x: Expr): Expr { + return new NotExpr({}) +} + +export function andArg(x: Expr): string { + let s = x!.String() + { + let { ok: ok } = $.typeAssert(x, {kind: $.TypeKind.Pointer, elemType: 'OrExpr'}) + if (ok) { + s = "(" + s + ")" + } + } + return s +} + +export function and(x: Expr, y: Expr): Expr { + return new AndExpr({}) +} + +export function orArg(x: Expr): string { + let s = x!.String() + { + let { ok: ok } = $.typeAssert(x, {kind: $.TypeKind.Pointer, elemType: 'AndExpr'}) + if (ok) { + s = "(" + s + ")" + } + } + return s +} + +export function or(x: Expr, y: Expr): Expr { + return new OrExpr({}) +} + +// Parse parses a single build constraint line of the form “//go:build ...” or “// +build ...” +// and returns the corresponding boolean expression. +export function Parse(line: string): [Expr, $.GoError] { + { + let [text, ok] = splitGoBuild(line) + if (ok) { + return parseExpr(text) + } + } + { + let [text, ok] = splitPlusBuild(line) + if (ok) { + return parsePlusBuildExpr(text) + } + } + return [null, errNotConstraint] +} + +// IsGoBuild reports whether the line of text is a “//go:build” constraint. +// It only checks the prefix of the text, not that the expression itself parses. +export function IsGoBuild(line: string): boolean { + let [, ok] = splitGoBuild(line) + return ok +} + +// splitGoBuild splits apart the leading //go:build prefix in line from the build expression itself. +// It returns "", false if the input is not a //go:build line or if the input contains multiple lines. +export function splitGoBuild(line: string): [string, boolean] { + let expr: string = "" + let ok: boolean = false + { + // A single trailing newline is OK; otherwise multiple lines are not. 
+ if ($.len(line) > 0 && $.indexString(line, $.len(line) - 1) == 10) { + line = $.sliceString(line, undefined, $.len(line) - 1) + } + if (strings.Contains(line, "\n")) { + return ["", false] + } + + if (!strings.HasPrefix(line, "//go:build")) { + return ["", false] + } + + line = strings.TrimSpace(line) + line = $.sliceString(line, $.len("//go:build"), undefined) + + // If strings.TrimSpace finds more to trim after removing the //go:build prefix, + // it means that the prefix was followed by a space, making this a //go:build line + // (as opposed to a //go:buildsomethingelse line). + // If line is empty, we had "//go:build" by itself, which also counts. + let trim = strings.TrimSpace(line) + if ($.len(line) == $.len(trim) && line != "") { + return ["", false] + } + + return [trim, true] + } +} + +// parseExpr parses a boolean build tag expression. +export function parseExpr(text: string): [Expr, $.GoError] { + let x: Expr = null + let err: $.GoError = null + { + using __defer = new $.DisposableStack(); + + // unreachable unless parser has a bug + __defer.defer(() => { + { + let e = $.recover() + if (e != null) { + const _temp_e = e + { + let { value: e, ok: ok } = $.typeAssert(_temp_e, {kind: $.TypeKind.Pointer, elemType: 'SyntaxError'}) + if (ok) { + err = e + return + } + } + $.panic(e) // unreachable unless parser has a bug + } + } + }); + + let p = new exprParser({s: text}) + x = p!.or() + if (p!.tok != "") { + $.panic(new SyntaxError({Err: "unexpected token " + p!.tok, Offset: p!.pos})) + } + return [x, null] + } +} + +// IsPlusBuild reports whether the line of text is a “// +build” constraint. +// It only checks the prefix of the text, not that the expression itself parses. +export function IsPlusBuild(line: string): boolean { + let [, ok] = splitPlusBuild(line) + return ok +} + +// splitPlusBuild splits apart the leading // +build prefix in line from the build expression itself. +// It returns "", false if the input is not a // +build line or if the input contains multiple lines. +export function splitPlusBuild(line: string): [string, boolean] { + let expr: string = "" + let ok: boolean = false + { + // A single trailing newline is OK; otherwise multiple lines are not. + if ($.len(line) > 0 && $.indexString(line, $.len(line) - 1) == 10) { + line = $.sliceString(line, undefined, $.len(line) - 1) + } + if (strings.Contains(line, "\n")) { + return ["", false] + } + + if (!strings.HasPrefix(line, "//")) { + return ["", false] + } + line = $.sliceString(line, $.len("//"), undefined) + // Note the space is optional; "//+build" is recognized too. + line = strings.TrimSpace(line) + + if (!strings.HasPrefix(line, "+build")) { + return ["", false] + } + line = $.sliceString(line, $.len("+build"), undefined) + + // If strings.TrimSpace finds more to trim after removing the +build prefix, + // it means that the prefix was followed by a space, making this a +build line + // (as opposed to a +buildsomethingelse line). + // If line is empty, we had "// +build" by itself, which also counts. + let trim = strings.TrimSpace(line) + if ($.len(line) == $.len(trim) && line != "") { + return ["", false] + } + + return [trim, true] + } +} + +// parsePlusBuildExpr parses a legacy build tag expression (as used with “// +build”). +export function parsePlusBuildExpr(text: string): [Expr, $.GoError] { + // Only allow up to 100 AND/OR operators for "old" syntax. + // This is much less than the limit for "new" syntax, + // but uses of old syntax were always very simple. 
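+	// For example (illustrative): in the legacy syntax, space-separated clauses
+	// are OR'ed and comma-separated literals within a clause are AND'ed, so
+	// "linux,386 darwin" parses as (linux && 386) || darwin.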
+ let maxOldSize: number = 100 + let size = 0 + + let x: Expr = null + for (let _i = 0; _i < $.len(strings.Fields(text)); _i++) { + const clause = strings.Fields(text)![_i] + { + let y: Expr = null + for (let _i = 0; _i < $.len(strings.Split(clause, ",")); _i++) { + const lit = strings.Split(clause, ",")![_i] + { + let z: Expr = null + let neg: boolean = false + if (strings.HasPrefix(lit, "!!") || lit == "!") { + z = tag("ignore") + } + else { + if (strings.HasPrefix(lit, "!")) { + neg = true + lit = $.sliceString(lit, $.len("!"), undefined) + } + if (isValidTag(lit)) { + z = tag(lit) + } + else { + z = tag("ignore") + } + if (neg) { + z = not(z) + } + } + if (y == null) { + y = z + } + else { + { + size++ + if (size > 100) { + return [null, errComplex] + } + } + y = and(y, z) + } + } + } + if (x == null) { + x = y + } + else { + { + size++ + if (size > 100) { + return [null, errComplex] + } + } + x = or(x, y) + } + } + } + if (x == null) { + x = tag("ignore") + } + return [x, null] +} + +// isValidTag reports whether the word is a valid build tag. +// Tags must be letters, digits, underscores or dots. +// Unlike in Go identifiers, all digits are fine (e.g., "386"). +export function isValidTag(word: string): boolean { + if (word == "") { + return false + } + { + const _runes = $.stringToRunes(word) + for (let i = 0; i < _runes.length; i++) { + const c = _runes[i] + { + if (!unicode.IsLetter(c) && !unicode.IsDigit(c) && c != 95 && c != 46) { + return false + } + } + } + } + return true +} + +// PlusBuildLines returns a sequence of “// +build” lines that evaluate to the build expression x. +// If the expression is too complex to convert directly to “// +build” lines, PlusBuildLines returns an error. +export function PlusBuildLines(x: Expr): [$.Slice, $.GoError] { + // Push all NOTs to the expression leaves, so that //go:build !(x && y) can be treated as !x || !y. + // This rewrite is both efficient and commonly needed, so it's worth doing. + // Essentially all other possible rewrites are too expensive and too rarely needed. + x = pushNot(x, false) + + // Split into AND of ORs of ANDs of literals (tag or NOT tag). + let split: $.Slice<$.Slice<$.Slice>> = null + for (let _i = 0; _i < $.len(appendSplitAnd(null, x)); _i++) { + const or = appendSplitAnd(null, x)![_i] + { + let ands: $.Slice<$.Slice> = null + for (let _i = 0; _i < $.len(appendSplitOr(null, or)); _i++) { + const and = appendSplitOr(null, or)![_i] + { + let lits: $.Slice = null + for (let _i = 0; _i < $.len(appendSplitAnd(null, and)); _i++) { + const lit = appendSplitAnd(null, and)![_i] + { + $.typeSwitch(lit, [{ types: [{kind: $.TypeKind.Pointer, elemType: 'TagExpr'}, {kind: $.TypeKind.Pointer, elemType: 'NotExpr'}], body: () => { + lits = $.append(lits, lit) + }}], () => { + return [null, errComplex] + }) + } + } + ands = $.append(ands, lits) + } + } + split = $.append(split, ands) + } + } + + // If all the ORs have length 1 (no actual OR'ing going on), + // push the top-level ANDs to the bottom level, so that we get + // one // +build line instead of many. + let maxOr = 0 + for (let _i = 0; _i < $.len(split); _i++) { + const or = split![_i] + { + if (maxOr < $.len(or)) { + maxOr = $.len(or) + } + } + } + if (maxOr == 1) { + let lits: $.Slice = null + for (let _i = 0; _i < $.len(split); _i++) { + const or = split![_i] + { + lits = $.append(lits, or![0]) + } + } + split = $.arrayToSlice<$.Slice<$.Slice>>([[ lits ]], 2) + } + + // Prepare the +build lines. 
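+	// For example (illustrative): (linux || darwin) && amd64 splits into two
+	// groups above, which the loop below renders as:
+	//   // +build linux darwin
+	//   // +build amd64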
+ let lines: $.Slice = null + for (let _i = 0; _i < $.len(split); _i++) { + const or = split![_i] + { + let line = "// +build" + for (let _i = 0; _i < $.len(or); _i++) { + const and = or![_i] + { + let clause = "" + for (let i = 0; i < $.len(and); i++) { + const lit = and![i] + { + if (i > 0) { + clause += "," + } + clause += lit!.String() + } + } + line += " " + clause + } + } + lines = $.append(lines, line) + } + } + + return [lines, null] +} + +// pushNot applies DeMorgan's law to push negations down the expression, +// so that only tags are negated in the result. +// (It applies the rewrites !(X && Y) => (!X || !Y) and !(X || Y) => (!X && !Y).) +export function pushNot(x: Expr, not: boolean): Expr { + + // unreachable + $.typeSwitch(x, [, + { types: [{kind: $.TypeKind.Pointer, elemType: 'NotExpr'}], body: (x) => { + { + let { ok: ok } = $.typeAssert(x!.X, {kind: $.TypeKind.Pointer, elemType: 'TagExpr'}) + if (ok && !not) { + return x + } + } + return pushNot(x!.X, !not) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'TagExpr'}], body: (x) => { + if (not) { + return new NotExpr({X: x}) + } + return x + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'AndExpr'}], body: (x) => { + let x1 = pushNot(x!.X, not) + let y1 = pushNot(x!.Y, not) + if (not) { + return or(x1, y1) + } + if (x1 == x!.X && y1 == x!.Y) { + return x + } + return and(x1, y1) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'OrExpr'}], body: (x) => { + let x1 = pushNot(x!.X, not) + let y1 = pushNot(x!.Y, not) + if (not) { + return and(x1, y1) + } + if (x1 == x!.X && y1 == x!.Y) { + return x + } + return or(x1, y1) + }}], () => { + return x + }) +} + +// appendSplitAnd appends x to list while splitting apart any top-level && expressions. +// For example, appendSplitAnd({W}, X && Y && Z) = {W, X, Y, Z}. +export function appendSplitAnd(list: $.Slice, x: Expr): $.Slice { + const _temp_x = x + { + let { value: x, ok: ok } = $.typeAssert(_temp_x, {kind: $.TypeKind.Pointer, elemType: 'AndExpr'}) + if (ok) { + list = appendSplitAnd(list, x!.X) + list = appendSplitAnd(list, x!.Y) + return list + } + } + return $.append(list, x) +} + +// appendSplitOr appends x to list while splitting apart any top-level || expressions. +// For example, appendSplitOr({W}, X || Y || Z) = {W, X, Y, Z}. 
+export function appendSplitOr(list: $.Slice, x: Expr): $.Slice { + const _temp_x = x + { + let { value: x, ok: ok } = $.typeAssert(_temp_x, {kind: $.TypeKind.Pointer, elemType: 'OrExpr'}) + if (ok) { + list = appendSplitOr(list, x!.X) + list = appendSplitOr(list, x!.Y) + return list + } + } + return $.append(list, x) +} + diff --git a/compliance/deps/go/build/constraint/index.ts b/compliance/deps/go/build/constraint/index.ts new file mode 100644 index 00000000..990e86a8 --- /dev/null +++ b/compliance/deps/go/build/constraint/index.ts @@ -0,0 +1,4 @@ +export { IsGoBuild, IsPlusBuild, Parse, PlusBuildLines } from "./expr.gs.js" +export { AndExpr, NotExpr, OrExpr, SyntaxError, TagExpr } from "./expr.gs.js" +export type { Expr } from "./expr.gs.js" +export { GoVersion } from "./vers.gs.js" diff --git a/compliance/deps/go/build/constraint/vers.gs.ts b/compliance/deps/go/build/constraint/vers.gs.ts new file mode 100644 index 00000000..594a5e86 --- /dev/null +++ b/compliance/deps/go/build/constraint/vers.gs.ts @@ -0,0 +1,101 @@ +import * as $ from "@goscript/builtin/index.js" +import { AndExpr, Expr, NotExpr, OrExpr, TagExpr } from "./expr.gs.js"; + +import * as strconv from "@goscript/strconv/index.js" + +import * as strings from "@goscript/strings/index.js" + +// GoVersion returns the minimum Go version implied by a given build expression. +// If the expression can be satisfied without any Go version tags, GoVersion returns an empty string. +// +// For example: +// +// GoVersion(linux && go1.22) = "go1.22" +// GoVersion((linux && go1.22) || (windows && go1.20)) = "go1.20" => go1.20 +// GoVersion(linux) = "" +// GoVersion(linux || (windows && go1.22)) = "" +// GoVersion(!go1.22) = "" +// +// GoVersion assumes that any tag or negated tag may independently be true, +// so that its analysis can be purely structural, without SAT solving. +// “Impossible” subexpressions may therefore affect the result. +// +// For example: +// +// GoVersion((linux && !linux && go1.20) || go1.21) = "go1.20" +export function GoVersion(x: Expr): string { + let v = minVersion(x, +1) + if (v < 0) { + return "" + } + if (v == 0) { + return "go1" + } + return "go1." + strconv.Itoa(v) +} + +// minVersion returns the minimum Go major version (9 for go1.9) +// implied by expression z, or if sign < 0, by expression !z. +export function minVersion(z: Expr, sign: number): number { + + // !foo implies nothing + + // not a go1.N tag + $.typeSwitch(z, [, + { types: [{kind: $.TypeKind.Pointer, elemType: 'AndExpr'}], body: (z) => { + let op = andVersion + if (sign < 0) { + op = orVersion + } + return op!(minVersion(z!.X, sign), minVersion(z!.Y, sign)) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'OrExpr'}], body: (z) => { + let op = orVersion + if (sign < 0) { + op = andVersion + } + return op!(minVersion(z!.X, sign), minVersion(z!.Y, sign)) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'NotExpr'}], body: (z) => { + return minVersion(z!.X, -sign) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'TagExpr'}], body: (z) => { + if (sign < 0) { + // !foo implies nothing + return -1 + } + if (z!.Tag == "go1") { + return 0 + } + let [, v, ] = strings.Cut(z!.Tag, "go1.") + let [n, err] = strconv.Atoi(v) + if (err != null) { + // not a go1.N tag + return -1 + } + return n + }}], () => { + return -1 + }) +} + +// andVersion returns the minimum Go version +// implied by the AND of two minimum Go versions, +// which is the max of the versions. 
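+//
+// For example (illustrative): andVersion(20, 22) = 22, since code that
+// requires both go1.20 and go1.22 can only build with go1.22 or newer.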
+export function andVersion(x: number, y: number): number { + if (x > y) { + return x + } + return y +} + +// orVersion returns the minimum Go version +// implied by the OR of two minimum Go versions, +// which is the min of the versions. +export function orVersion(x: number, y: number): number { + if (x < y) { + return x + } + return y +} + diff --git a/compliance/deps/go/parser/index.ts b/compliance/deps/go/parser/index.ts new file mode 100644 index 00000000..7926d99b --- /dev/null +++ b/compliance/deps/go/parser/index.ts @@ -0,0 +1,2 @@ +export { AllErrors, DeclarationErrors, ImportsOnly, PackageClauseOnly, ParseComments, ParseDir, ParseExpr, ParseExprFrom, ParseFile, SkipObjectResolution, SpuriousErrors, Trace } from "./interface.gs.js" +export type { Mode } from "./interface.gs.js" diff --git a/compliance/deps/go/parser/interface.gs.ts b/compliance/deps/go/parser/interface.gs.ts new file mode 100644 index 00000000..45e11e9a --- /dev/null +++ b/compliance/deps/go/parser/interface.gs.ts @@ -0,0 +1,297 @@ +import * as $ from "@goscript/builtin/index.js" +import { bailout, parser } from "./parser.gs.js"; + +import * as bytes from "@goscript/bytes/index.js" + +import * as errors from "@goscript/errors/index.js" + +import * as ast from "@goscript/go/ast/index.js" + +import * as token from "@goscript/go/token/index.js" + +import * as io from "@goscript/io/index.js" + +import * as fs from "@goscript/io/fs/index.js" + +import * as os from "@goscript/os/index.js" + +import * as filepath from "@goscript/path/filepath/index.js" + +import * as strings from "@goscript/strings/index.js" + +// stop parsing after package clause +export let PackageClauseOnly: Mode = (1 << 0) + +// stop parsing after import declarations +export let ImportsOnly: Mode = 0 + +// parse comments and add them to AST +export let ParseComments: Mode = 0 + +// print a trace of parsed productions +export let Trace: Mode = 0 + +// report declaration errors +export let DeclarationErrors: Mode = 0 + +// same as AllErrors, for backward-compatibility +export let SpuriousErrors: Mode = 0 + +// skip deprecated identifier resolution; see ParseFile +export let SkipObjectResolution: Mode = 0 + +// report all errors (not just the first 10 on different lines) +export let AllErrors: Mode = 32 + +export type Mode = number; + +// If src != nil, readSource converts src to a []byte if possible; +// otherwise it returns an error. If src == nil, readSource returns +// the result of reading the file specified by filename. +export function readSource(filename: string, src: null | any): [$.Bytes, $.GoError] { + + // is io.Reader, but src is already available in []byte form + if (src != null) { + + // is io.Reader, but src is already available in []byte form + $.typeSwitch(src, [{ types: [{kind: $.TypeKind.Basic, name: 'string'}], body: (s) => { + return [$.stringToBytes(s), null] + }}, + { types: [{kind: $.TypeKind.Slice, elemType: {kind: $.TypeKind.Basic, name: 'number'}}], body: (s) => { + return [s, null] + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'bytes.Buffer'}], body: (s) => { + if (s != null) { + return [s!.Bytes(), null] + } + }}, + { types: ['io.Reader'], body: (s) => { + return io.ReadAll(s) + }}]) + return [null, errors.New("invalid source")] + } + return os.ReadFile(filename) +} + +// ParseFile parses the source code of a single Go source file and returns +// the corresponding [ast.File] node. The source code may be provided via +// the filename of the source file, or via the src parameter. 
+// +// If src != nil, ParseFile parses the source from src and the filename is +// only used when recording position information. The type of the argument +// for the src parameter must be string, []byte, or [io.Reader]. +// If src == nil, ParseFile parses the file specified by filename. +// +// The mode parameter controls the amount of source text parsed and +// other optional parser functionality. If the [SkipObjectResolution] +// mode bit is set (recommended), the object resolution phase of +// parsing will be skipped, causing File.Scope, File.Unresolved, and +// all Ident.Obj fields to be nil. Those fields are deprecated; see +// [ast.Object] for details. +// +// Position information is recorded in the file set fset, which must not be +// nil. +// +// If the source couldn't be read, the returned AST is nil and the error +// indicates the specific failure. If the source was read but syntax +// errors were found, the result is a partial AST (with [ast.Bad]* nodes +// representing the fragments of erroneous source code). Multiple errors +// are returned via a scanner.ErrorList which is sorted by source position. +export async function ParseFile(fset: token.FileSet | null, filename: string, src: null | any, mode: Mode): Promise<[ast.File | null, $.GoError]> { + let f: ast.File | null = null + let err: $.GoError = null + { + await using __defer = new $.AsyncDisposableStack(); + if (fset == null) { + $.panic("parser.ParseFile: no token.FileSet provided (fset == nil)") + } + + // get source + let text: $.Bytes + [text, err] = readSource(filename, src) + if (err != null) { + return [null, err] + } + + let file = await fset!.AddFile(filename, -1, $.len(text)) + + let p: parser = new parser({}) + + // resume same panic if it's not a bailout + + // set result values + + // source is not a valid Go source file - satisfy + // ParseFile API and return a valid (but) empty + // *ast.File + + // Ensure the start/end are consistent, + // whether parsing succeeded or not. + __defer.defer(async () => { + { + let e = $.recover() + if (e != null) { + // resume same panic if it's not a bailout + let { value: bail, ok: ok } = $.typeAssert(e, 'bailout') + if (!ok) { + $.panic(e) + } + else if (bail.msg != "") { + scanner.ErrorList_Add(p._fields.errors, await p.file!.Position(bail.pos), bail.msg) + } + } + } + if (f == null) { + // source is not a valid Go source file - satisfy + // ParseFile API and return a valid (but) empty + // *ast.File + f = new ast.File({Name: new ast.Ident(), Scope: null}) + } + f!.FileStart = (file!.Base() as token.Pos) + f!.FileEnd = (file!.Base() + file!.Size() as token.Pos) + scanner.ErrorList_Sort(p.errors) + err = scanner.ErrorList_Err(p.errors) + }); + + // parse source + p.init(file, text, mode) + f = p.parseFile() + + return [f, err] + } +} + +// ParseDir calls [ParseFile] for all files with names ending in ".go" in the +// directory specified by path and returns a map of package name -> package +// AST with all the packages found. +// +// If filter != nil, only the files with [fs.FileInfo] entries passing through +// the filter (and ending in ".go") are considered. The mode bits are passed +// to [ParseFile] unchanged. Position information is recorded in fset, which +// must not be nil. +// +// If the directory couldn't be read, a nil map and the respective error are +// returned. If a parse error occurred, a non-nil but incomplete map and the +// first error encountered are returned. 
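+//
+// Illustrative usage sketch (the path and mode below are only examples):
+//
+//	const fset = token.NewFileSet()
+//	const [pkgs, err] = await ParseDir(fset, "./testdata", null, ParseComments)
+//	// pkgs maps each package name found in the directory to its ast.Package.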
+export async function ParseDir(fset: token.FileSet | null, path: string, filter: ((p0: fs.FileInfo) => boolean) | null, mode: Mode): Promise<[Map | null, $.GoError]> { + let pkgs: Map | null = null + let first: $.GoError = null + { + let [list, err] = await os.ReadDir(path) + if (err != null) { + return [null, err] + } + + pkgs = $.makeMap() + for (let _i = 0; _i < $.len(list); _i++) { + const d = list![_i] + { + if (d!.IsDir() || !strings.HasSuffix(d!.Name(), ".go")) { + continue + } + if (filter != null) { + let [info, err] = d!.Info() + if (err != null) { + return [null, err] + } + if (!filter!(info)) { + continue + } + } + let filename = filepath.Join(path, d!.Name()) + { + let [src, err] = await ParseFile(fset, filename, null, mode) + if (err == null) { + let name = src!.Name!.Name + let [pkg, found] = $.mapGet(pkgs, name, null) + if (!found) { + pkg = new ast.Package({Files: $.makeMap(), Name: name}) + $.mapSet(pkgs, name, pkg) + } + $.mapSet(pkg!.Files, filename, src) + } + else if (first == null) { + first = err + } + } + } + } + + return [pkgs, first] + } +} + +// ParseExprFrom is a convenience function for parsing an expression. +// The arguments have the same meaning as for [ParseFile], but the source must +// be a valid Go (type or value) expression. Specifically, fset must not +// be nil. +// +// If the source couldn't be read, the returned AST is nil and the error +// indicates the specific failure. If the source was read but syntax +// errors were found, the result is a partial AST (with [ast.Bad]* nodes +// representing the fragments of erroneous source code). Multiple errors +// are returned via a scanner.ErrorList which is sorted by source position. +export async function ParseExprFrom(fset: token.FileSet | null, filename: string, src: null | any, mode: Mode): Promise<[ast.Expr, $.GoError]> { + let expr: ast.Expr = null + let err: $.GoError = null + { + await using __defer = new $.AsyncDisposableStack(); + if (fset == null) { + $.panic("parser.ParseExprFrom: no token.FileSet provided (fset == nil)") + } + + // get source + let text: $.Bytes + [text, err] = readSource(filename, src) + if (err != null) { + return [null, err] + } + + let p: parser = new parser({}) + + // resume same panic if it's not a bailout + __defer.defer(async () => { + { + let e = $.recover() + if (e != null) { + // resume same panic if it's not a bailout + let { value: bail, ok: ok } = $.typeAssert(e, 'bailout') + if (!ok) { + $.panic(e) + } + else if (bail.msg != "") { + scanner.ErrorList_Add(p._fields.errors, await p.file!.Position(bail.pos), bail.msg) + } + } + } + scanner.ErrorList_Sort(p.errors) + err = scanner.ErrorList_Err(p.errors) + }); + + // parse expr + let file = await fset!.AddFile(filename, -1, $.len(text)) + p.init(file, text, mode) + expr = p.parseRhs() + + // If a semicolon was inserted, consume it; + // report an error if there's more tokens. + if (p.tok == token.SEMICOLON && p.lit == "\n") { + p.next() + } + p.expect(token.EOF) + + return [expr, err] + } +} + +// ParseExpr is a convenience function for obtaining the AST of an expression x. +// The position information recorded in the AST is undefined. The filename used +// in error messages is the empty string. +// +// If syntax errors were found, the result is a partial AST (with [ast.Bad]* nodes +// representing the fragments of erroneous source code). Multiple errors are +// returned via a scanner.ErrorList which is sorted by source position. 
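+//
+// For example (illustrative):
+//
+//	const [expr, err] = await ParseExpr("a + b*2")
+//	// On success err is null and expr is an ast.BinaryExpr with Op == token.ADD.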
+export async function ParseExpr(x: string): Promise<[ast.Expr, $.GoError]> { + return await ParseExprFrom(token.NewFileSet(), "", $.stringToBytes(x), 0) +} + diff --git a/compliance/deps/go/parser/parser.gs.ts b/compliance/deps/go/parser/parser.gs.ts new file mode 100644 index 00000000..464eb2d1 --- /dev/null +++ b/compliance/deps/go/parser/parser.gs.ts @@ -0,0 +1,3658 @@ +import * as $ from "@goscript/builtin/index.js" +import { resolveFile } from "./resolver.gs.js"; +import { Mode } from "./interface.gs.js"; + +import * as fmt from "@goscript/fmt/index.js" + +import * as ast from "@goscript/go/ast/index.js" + +import * as constraint from "@goscript/go/build/constraint/index.js" + +import * as scanner from "@goscript/go/scanner/index.js" + +import * as token from "@goscript/go/token/index.js" + +import * as strings from "@goscript/strings/index.js" + +let maxNestLev: number = 1e5 + +let basic: number = 0 + +let labelOk: number = 0 + +let rangeOk: number = 0 + +export class bailout { + public get pos(): token.Pos { + return this._fields.pos.value + } + public set pos(value: token.Pos) { + this._fields.pos.value = value + } + + public get msg(): string { + return this._fields.msg.value + } + public set msg(value: string) { + this._fields.msg.value = value + } + + public _fields: { + pos: $.VarRef; + msg: $.VarRef; + } + + constructor(init?: Partial<{msg?: string, pos?: token.Pos}>) { + this._fields = { + pos: $.varRef(init?.pos ?? 0 as token.Pos), + msg: $.varRef(init?.msg ?? "") + } + } + + public clone(): bailout { + const cloned = new bailout() + cloned._fields = { + pos: $.varRef(this._fields.pos.value), + msg: $.varRef(this._fields.msg.value) + } + return cloned + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'bailout', + new bailout(), + [], + bailout, + {"pos": "Pos", "msg": { kind: $.TypeKind.Basic, name: "string" }} + ); +} + +export class field { + public get name(): ast.Ident | null { + return this._fields.name.value + } + public set name(value: ast.Ident | null) { + this._fields.name.value = value + } + + public get typ(): ast.Expr { + return this._fields.typ.value + } + public set typ(value: ast.Expr) { + this._fields.typ.value = value + } + + public _fields: { + name: $.VarRef; + typ: $.VarRef; + } + + constructor(init?: Partial<{name?: ast.Ident | null, typ?: ast.Expr}>) { + this._fields = { + name: $.varRef(init?.name ?? null), + typ: $.varRef(init?.typ ?? null) + } + } + + public clone(): field { + const cloned = new field() + cloned._fields = { + name: $.varRef(this._fields.name.value ? 
$.markAsStructValue(this._fields.name.value.clone()) : null), + typ: $.varRef(this._fields.typ.value) + } + return cloned + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'field', + new field(), + [], + field, + {"name": { kind: $.TypeKind.Pointer, elemType: "Ident" }, "typ": "Expr"} + ); +} + +export type parseSpecFunction = ((doc: ast.CommentGroup | null, keyword: token.Token, iota: number) => ast.Spec) | null; + +export class parser { + public get file(): token.File | null { + return this._fields.file.value + } + public set file(value: token.File | null) { + this._fields.file.value = value + } + + public get errors(): scanner.ErrorList { + return this._fields.errors.value + } + public set errors(value: scanner.ErrorList) { + this._fields.errors.value = value + } + + public get scanner(): scanner.Scanner { + return this._fields.scanner.value + } + public set scanner(value: scanner.Scanner) { + this._fields.scanner.value = value + } + + // Tracing/debugging + // parsing mode + public get mode(): Mode { + return this._fields.mode.value + } + public set mode(value: Mode) { + this._fields.mode.value = value + } + + // == (mode&Trace != 0) + public get trace(): boolean { + return this._fields.trace.value + } + public set trace(value: boolean) { + this._fields.trace.value = value + } + + // indentation used for tracing output + public get indent(): number { + return this._fields.indent.value + } + public set indent(value: number) { + this._fields.indent.value = value + } + + // Comments + public get comments(): $.Slice { + return this._fields.comments.value + } + public set comments(value: $.Slice) { + this._fields.comments.value = value + } + + // last lead comment + public get leadComment(): ast.CommentGroup | null { + return this._fields.leadComment.value + } + public set leadComment(value: ast.CommentGroup | null) { + this._fields.leadComment.value = value + } + + // last line comment + public get lineComment(): ast.CommentGroup | null { + return this._fields.lineComment.value + } + public set lineComment(value: ast.CommentGroup | null) { + this._fields.lineComment.value = value + } + + // in top of file (before package clause) + public get top(): boolean { + return this._fields.top.value + } + public set top(value: boolean) { + this._fields.top.value = value + } + + // minimum Go version found in //go:build comment + public get goVersion(): string { + return this._fields.goVersion.value + } + public set goVersion(value: string) { + this._fields.goVersion.value = value + } + + // Next token + // token position + public get pos(): token.Pos { + return this._fields.pos.value + } + public set pos(value: token.Pos) { + this._fields.pos.value = value + } + + // one token look-ahead + public get tok(): token.Token { + return this._fields.tok.value + } + public set tok(value: token.Token) { + this._fields.tok.value = value + } + + // token literal + public get lit(): string { + return this._fields.lit.value + } + public set lit(value: string) { + this._fields.lit.value = value + } + + // Error recovery + // (used to limit the number of calls to parser.advance + // w/o making scanning progress - avoids potential endless + // loops across multiple parser functions during error recovery) + // last synchronization position + public get syncPos(): token.Pos { + return this._fields.syncPos.value + } + public set syncPos(value: token.Pos) { + this._fields.syncPos.value = value + } + + // number of parser.advance calls without progress + public get 
syncCnt(): number { + return this._fields.syncCnt.value + } + public set syncCnt(value: number) { + this._fields.syncCnt.value = value + } + + // Non-syntactic parser control + // < 0: in control clause, >= 0: in expression + public get exprLev(): number { + return this._fields.exprLev.value + } + public set exprLev(value: number) { + this._fields.exprLev.value = value + } + + // if set, the parser is parsing a rhs expression + public get inRhs(): boolean { + return this._fields.inRhs.value + } + public set inRhs(value: boolean) { + this._fields.inRhs.value = value + } + + // list of imports + public get imports(): $.Slice { + return this._fields.imports.value + } + public set imports(value: $.Slice) { + this._fields.imports.value = value + } + + // nestLev is used to track and limit the recursion depth + // during parsing. + public get nestLev(): number { + return this._fields.nestLev.value + } + public set nestLev(value: number) { + this._fields.nestLev.value = value + } + + public _fields: { + file: $.VarRef; + errors: $.VarRef; + scanner: $.VarRef; + mode: $.VarRef; + trace: $.VarRef; + indent: $.VarRef; + comments: $.VarRef<$.Slice>; + leadComment: $.VarRef; + lineComment: $.VarRef; + top: $.VarRef; + goVersion: $.VarRef; + pos: $.VarRef; + tok: $.VarRef; + lit: $.VarRef; + syncPos: $.VarRef; + syncCnt: $.VarRef; + exprLev: $.VarRef; + inRhs: $.VarRef; + imports: $.VarRef<$.Slice>; + nestLev: $.VarRef; + } + + constructor(init?: Partial<{comments?: $.Slice, errors?: scanner.ErrorList, exprLev?: number, file?: token.File | null, goVersion?: string, imports?: $.Slice, inRhs?: boolean, indent?: number, leadComment?: ast.CommentGroup | null, lineComment?: ast.CommentGroup | null, lit?: string, mode?: Mode, nestLev?: number, pos?: token.Pos, scanner?: scanner.Scanner, syncCnt?: number, syncPos?: token.Pos, tok?: token.Token, top?: boolean, trace?: boolean}>) { + this._fields = { + file: $.varRef(init?.file ?? null), + errors: $.varRef(init?.errors ?? null as scanner.ErrorList), + scanner: $.varRef(init?.scanner ? $.markAsStructValue(init.scanner.clone()) : new scanner.Scanner()), + mode: $.varRef(init?.mode ?? new Mode(0)), + trace: $.varRef(init?.trace ?? false), + indent: $.varRef(init?.indent ?? 0), + comments: $.varRef(init?.comments ?? null), + leadComment: $.varRef(init?.leadComment ?? null), + lineComment: $.varRef(init?.lineComment ?? null), + top: $.varRef(init?.top ?? false), + goVersion: $.varRef(init?.goVersion ?? ""), + pos: $.varRef(init?.pos ?? 0 as token.Pos), + tok: $.varRef(init?.tok ?? 0 as token.Token), + lit: $.varRef(init?.lit ?? ""), + syncPos: $.varRef(init?.syncPos ?? 0 as token.Pos), + syncCnt: $.varRef(init?.syncCnt ?? 0), + exprLev: $.varRef(init?.exprLev ?? 0), + inRhs: $.varRef(init?.inRhs ?? false), + imports: $.varRef(init?.imports ?? null), + nestLev: $.varRef(init?.nestLev ?? 0) + } + } + + public clone(): parser { + const cloned = new parser() + cloned._fields = { + file: $.varRef(this._fields.file.value ? $.markAsStructValue(this._fields.file.value.clone()) : null), + errors: $.varRef(this._fields.errors.value), + scanner: $.varRef($.markAsStructValue(this._fields.scanner.value.clone())), + mode: $.varRef(this._fields.mode.value), + trace: $.varRef(this._fields.trace.value), + indent: $.varRef(this._fields.indent.value), + comments: $.varRef(this._fields.comments.value), + leadComment: $.varRef(this._fields.leadComment.value ? $.markAsStructValue(this._fields.leadComment.value.clone()) : null), + lineComment: $.varRef(this._fields.lineComment.value ? 
$.markAsStructValue(this._fields.lineComment.value.clone()) : null), + top: $.varRef(this._fields.top.value), + goVersion: $.varRef(this._fields.goVersion.value), + pos: $.varRef(this._fields.pos.value), + tok: $.varRef(this._fields.tok.value), + lit: $.varRef(this._fields.lit.value), + syncPos: $.varRef(this._fields.syncPos.value), + syncCnt: $.varRef(this._fields.syncCnt.value), + exprLev: $.varRef(this._fields.exprLev.value), + inRhs: $.varRef(this._fields.inRhs.value), + imports: $.varRef(this._fields.imports.value), + nestLev: $.varRef(this._fields.nestLev.value) + } + return cloned + } + + public init(file: token.File | null, src: $.Bytes, mode: Mode): void { + const p = this + p.file = file + let eh = (pos: token.Position, msg: string): void => { + scanner.ErrorList_Add(p._fields.errors, pos, msg) + } + await p.scanner.Init(p.file, src, eh, scanner.ScanComments) + p.top = true + p.mode = mode + p.trace = (mode & 8) != 0 // for convenience (p.trace is used frequently) + p.next() + } + + public printTrace(...a: any[]): void { + const p = this + let dots: string = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . " + let n: number = $.len(". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . ") + let pos = $.markAsStructValue(await p.file!.Position(p.pos).clone()) + fmt.Printf("%5d:%3d: ", pos.Line, pos.Column) + let i = 2 * p.indent + for (; i > 64; ) { + fmt.Print(". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . ") + i -= 64 + } + fmt.Print($.sliceString(". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . ", 0, i)) + fmt.Println(...(a ?? [])) + } + + // Advance to the next token. + public next0(): void { + const p = this + if (p.trace && token.Pos_IsValid(p.pos)) { + let s = token.Token_String(p.tok) + switch (true) { + case token.Token_IsLiteral(p.tok): + p.printTrace(s, p.lit) + break + case token.Token_IsOperator(p.tok): + case token.Token_IsKeyword(p.tok): + p.printTrace("\"" + s + "\"") + break + default: + p.printTrace(s) + break + } + } + for (; ; ) { + { + const _tmp = await p.scanner.Scan() + p.pos = _tmp[0] + p.tok = _tmp[1] + p.lit = _tmp[2] + } + + // Found a non-comment; top of file is over. + if (p.tok == token.COMMENT) { + if (p.top && strings.HasPrefix(p.lit, "//go:build")) { + { + let [x, err] = constraint.Parse(p.lit) + if (err == null) { + p.goVersion = constraint.GoVersion(x) + } + } + } + if ((p.mode & 4) == 0) { + continue + } + } + else { + // Found a non-comment; top of file is over. + p.top = false + } + break + } + } + + // Consume a comment and return it and the line on which it ends. + public consumeComment(): [ast.Comment | null, number] { + const p = this + let comment: ast.Comment | null = null + let endline: number = 0 + endline = p.file!.Line(p.pos) + if ($.indexString(p.lit, 1) == 42) { + // don't use range here - no need to decode Unicode code points + for (let i = 0; i < $.len(p.lit); i++) { + if ($.indexString(p.lit, i) == 10) { + endline++ + } + } + } + comment = new ast.Comment({Slash: p.pos, Text: p.lit}) + p.next0() + return [comment, endline] + } + + // Consume a group of adjacent comments, add it to the parser's + // comments list, and return it together with the line at which + // the last comment in the group ends. A non-comment token or n + // empty lines terminate a comment group. 
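+	//
+	// For example (illustrative): with n == 1, // comments on consecutive lines
+	// form a single group and a blank line between them ends the group; with
+	// n == 0 the group ends at the first comment that starts on a later line.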
+ public consumeCommentGroup(n: number): [ast.CommentGroup | null, number] { + const p = this + let comments: ast.CommentGroup | null = null + let endline: number = 0 + let list: $.Slice = null + endline = p.file!.Line(p.pos) + for (; p.tok == token.COMMENT && p.file!.Line(p.pos) <= endline + n; ) { + let comment: ast.Comment | null = null + ;[comment, endline] = p.consumeComment() + list = $.append(list, comment) + } + comments = new ast.CommentGroup({List: list}) + p.comments = $.append(p.comments, comments) + return [comments, endline] + } + + // Advance to the next non-comment token. In the process, collect + // any comment groups encountered, and remember the last lead and + // line comments. + // + // A lead comment is a comment group that starts and ends in a + // line without any other tokens and that is followed by a non-comment + // token on the line immediately after the comment group. + // + // A line comment is a comment group that follows a non-comment + // token on the same line, and that has no tokens after it on the line + // where it ends. + // + // Lead and line comments may be considered documentation that is + // stored in the AST. + public next(): void { + const p = this + p.leadComment = null + p.lineComment = null + let prev = p.pos + p.next0() + if (p.tok == token.COMMENT) { + let comment: ast.CommentGroup | null = null + let endline: number = 0 + + // The comment is on same line as the previous token; it + // cannot be a lead comment but may be a line comment. + + // The next token is on a different line, thus + // the last comment group is a line comment. + if (p.file!.Line(p.pos) == p.file!.Line(prev)) { + // The comment is on same line as the previous token; it + // cannot be a lead comment but may be a line comment. + ;[comment, endline] = p.consumeCommentGroup(0) + + // The next token is on a different line, thus + // the last comment group is a line comment. + if (p.file!.Line(p.pos) != endline || p.tok == token.SEMICOLON || p.tok == token.EOF) { + // The next token is on a different line, thus + // the last comment group is a line comment. + p.lineComment = comment + } + } + + // consume successor comments, if any + endline = -1 + for (; p.tok == token.COMMENT; ) { + ;[comment, endline] = p.consumeCommentGroup(1) + } + + // The next token is following on the line immediately after the + // comment group, thus the last comment group is a lead comment. + if (endline + 1 == p.file!.Line(p.pos)) { + // The next token is following on the line immediately after the + // comment group, thus the last comment group is a lead comment. + p.leadComment = comment + } + } + } + + public error(pos: token.Pos, msg: string): void { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "error: " + msg)) + }); + } + let epos = $.markAsStructValue(await p.file!.Position(pos).clone()) + if ((p.mode & 32) == 0) { + let n = $.len(p.errors) + + // discard - likely a spurious error + if (n > 0 && p.errors![n - 1]!.Pos.Line == epos.Line) { + return + } + if (n > 10) { + $.panic($.markAsStructValue(new bailout({}))) + } + } + scanner.ErrorList_Add(p._fields.errors, epos, msg) + } + + public errorExpected(pos: token.Pos, msg: string): void { + const p = this + msg = "expected " + msg + if (pos == p.pos) { + // the error happened at the current position; + // make the error message more specific + + // print 123 rather than 'INT', etc. 
+ switch (true) { + case p.tok == token.SEMICOLON && p.lit == "\n": + msg += ", found newline" + break + case token.Token_IsLiteral(p.tok): + msg += ", found " + p.lit + break + default: + msg += ", found '" + token.Token_String(p.tok) + "'" + break + } + } + p.error(pos, msg) + } + + public expect(tok: token.Token): token.Pos { + const p = this + let pos = p.pos + if (p.tok != tok) { + p.errorExpected(pos, "'" + token.Token_String(tok) + "'") + } + p.next() // make progress + return pos + } + + // expect2 is like expect, but it returns an invalid position + // if the expected token is not found. + public expect2(tok: token.Token): token.Pos { + const p = this + let pos: token.Pos = 0 + if (p.tok == tok) { + pos = p.pos + } + else { + p.errorExpected(p.pos, "'" + token.Token_String(tok) + "'") + } + p.next() // make progress + return pos + } + + // expectClosing is like expect but provides a better error message + // for the common case of a missing comma before a newline. + public expectClosing(tok: token.Token, context: string): token.Pos { + const p = this + if (p.tok != tok && p.tok == token.SEMICOLON && p.lit == "\n") { + p.error(p.pos, "missing ',' before newline in " + context) + p.next() + } + return p.expect(tok) + } + + // expectSemi consumes a semicolon and returns the applicable line comment. + public expectSemi(): ast.CommentGroup | null { + const p = this + let comment: ast.CommentGroup | null = null + if (p.tok != token.RPAREN && p.tok != token.RBRACE) { + + // permit a ',' instead of a ';' but complain + + // explicit semicolon + + // use following comments + + // artificial semicolon + // use preceding comments + switch (p.tok) { + case token.COMMA: + p.errorExpected(p.pos, "';'") + // fallthrough // fallthrough statement skipped + break + case token.SEMICOLON: + if (p.lit == ";") { + // explicit semicolon + p.next() + comment = p.lineComment // use following comments + } + else { + // artificial semicolon + comment = p.lineComment // use preceding comments + p.next() + } + return comment + break + default: + p.errorExpected(p.pos, "';'") + p.advance(stmtStart) + break + } + } + return null + } + + public atComma(context: string, follow: token.Token): boolean { + const p = this + if (p.tok == token.COMMA) { + return true + } + if (p.tok != follow) { + let msg = "missing ','" + if (p.tok == token.SEMICOLON && p.lit == "\n") { + msg += " before newline" + } + p.error(p.pos, msg + " in " + context) + return true + } + return false + } + + // advance consumes tokens until the current token p.tok + // is in the 'to' set, or token.EOF. For error recovery. + public advance(to: Map | null): void { + const p = this + for (; p.tok != token.EOF; p.next()) { + + // Return only if parser made some progress since last + // sync or if it has not reached 10 advance calls without + // progress. Otherwise consume at least one token to + // avoid an endless parser loop (it is possible that + // both parseOperand and parseStmt call advance and + // correctly do not advance, thus the need for the + // invocation limit p.syncCnt). + + // Reaching here indicates a parser bug, likely an + // incorrect token list in this function, but it only + // leads to skipping of possibly correct code if a + // previous error is present, and thus is preferred + // over a non-terminating parse. + if ($.mapGet(to, p.tok, false)[0]) { + // Return only if parser made some progress since last + // sync or if it has not reached 10 advance calls without + // progress. 
Otherwise consume at least one token to + // avoid an endless parser loop (it is possible that + // both parseOperand and parseStmt call advance and + // correctly do not advance, thus the need for the + // invocation limit p.syncCnt). + if (p.pos == p.syncPos && p.syncCnt < 10) { + p.syncCnt++ + return + } + + // Reaching here indicates a parser bug, likely an + // incorrect token list in this function, but it only + // leads to skipping of possibly correct code if a + // previous error is present, and thus is preferred + // over a non-terminating parse. + if (p.pos > p.syncPos) { + p.syncPos = p.pos + p.syncCnt = 0 + return + } + // Reaching here indicates a parser bug, likely an + // incorrect token list in this function, but it only + // leads to skipping of possibly correct code if a + // previous error is present, and thus is preferred + // over a non-terminating parse. + } + } + } + + // safePos returns a valid file position for a given position: If pos + // is valid to begin with, safePos returns pos. If pos is out-of-range, + // safePos returns the EOF position. + // + // This is hack to work around "artificial" end positions in the AST which + // are computed by adding 1 to (presumably valid) token positions. If the + // token positions are invalid due to parse errors, the resulting end position + // may be past the file's EOF position, which would lead to panics if used + // later on. + public safePos(pos: token.Pos): token.Pos { + const p = this + using __defer = new $.DisposableStack(); + let res: token.Pos = 0 + __defer.defer(() => { + if ($.recover() != null) { + res = (p.file!.Base() + p.file!.Size() as token.Pos) // EOF position + } + }); + /* _ = */ p.file!.Offset(pos) // trigger a panic if position is out-of-range + return pos + } + + public parseIdent(): ast.Ident | null { + const p = this + let pos = p.pos + let name = "_" + if (p.tok == token.IDENT) { + name = p.lit + p.next() + } + else { + p.expect(token.IDENT) // use expect() error handling + } + return new ast.Ident({Name: name, NamePos: pos}) + } + + public parseIdentList(): $.Slice { + const p = this + using __defer = new $.DisposableStack(); + let list: $.Slice = null + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "IdentList")) + }); + } + list = $.append(list, p.parseIdent()) + for (; p.tok == token.COMMA; ) { + p.next() + list = $.append(list, p.parseIdent()) + } + return list + } + + // If lhs is set, result list elements which are identifiers are not resolved. 
+ public parseExprList(): $.Slice { + const p = this + using __defer = new $.DisposableStack(); + let list: $.Slice = null + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "ExpressionList")) + }); + } + list = $.append(list, p.parseExpr()) + for (; p.tok == token.COMMA; ) { + p.next() + list = $.append(list, p.parseExpr()) + } + return list + } + + public parseList(inRhs: boolean): $.Slice { + const p = this + let old = p.inRhs + p.inRhs = inRhs + let list = p.parseExprList() + p.inRhs = old + return list + } + + public parseType(): ast.Expr { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "Type")) + }); + } + let typ = p.tryIdentOrType() + if (typ == null) { + let pos = p.pos + p.errorExpected(pos, "type") + p.advance(exprEnd) + return new ast.BadExpr({From: pos, To: p.pos}) + } + return typ + } + + public parseQualifiedIdent(ident: ast.Ident | null): ast.Expr { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "QualifiedIdent")) + }); + } + let typ = p.parseTypeName(ident) + if (p.tok == token.LBRACK) { + typ = p.parseTypeInstance(typ) + } + return typ + } + + // If the result is an identifier, it is not resolved. + public parseTypeName(ident: ast.Ident | null): ast.Expr { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "TypeName")) + }); + } + if (ident == null) { + ident = p.parseIdent() + } + if (p.tok == token.PERIOD) { + // ident is a package name + p.next() + let sel = p.parseIdent() + return new ast.SelectorExpr({Sel: sel, X: ident}) + } + return ident + } + + // "[" has already been consumed, and lbrack is its position. + // If len != nil it is the already consumed array length. + public parseArrayType(lbrack: token.Pos, len: ast.Expr): ast.ArrayType | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "ArrayType")) + }); + } + if (len == null) { + p.exprLev++ + // always permit ellipsis for more fault-tolerant parsing + if (p.tok == token.ELLIPSIS) { + len = new ast.Ellipsis({Ellipsis: p.pos}) + p.next() + } + else if (p.tok != token.RBRACK) { + len = p.parseRhs() + } + p.exprLev-- + } + if (p.tok == token.COMMA) { + // Trailing commas are accepted in type parameter + // lists but not in array type declarations. + // Accept for better error handling but complain. 
+ p.error(p.pos, "unexpected comma; expecting ]") + p.next() + } + p.expect(token.RBRACK) + let elt = p.parseType() + return new ast.ArrayType({Elt: elt, Lbrack: lbrack, Len: len}) + } + + public parseArrayFieldOrTypeInstance(x: ast.Ident | null): [ast.Ident | null, ast.Expr] { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "ArrayFieldOrTypeInstance")) + }); + } + let lbrack = p.expect(token.LBRACK) + let trailingComma = token.NoPos // if valid, the position of a trailing comma preceding the ']' + let args: $.Slice = null + if (p.tok != token.RBRACK) { + p.exprLev++ + args = $.append(args, p.parseRhs()) + for (; p.tok == token.COMMA; ) { + let comma = p.pos + p.next() + if (p.tok == token.RBRACK) { + trailingComma = comma + break + } + args = $.append(args, p.parseRhs()) + } + p.exprLev-- + } + let rbrack = p.expect(token.RBRACK) + if ($.len(args) == 0) { + // x []E + let elt = p.parseType() + return [x, new ast.ArrayType({Elt: elt, Lbrack: lbrack})] + } + if ($.len(args) == 1) { + let elt = p.tryIdentOrType() + + // x [P]E + + // Trailing commas are invalid in array type fields. + if (elt != null) { + // x [P]E + + // Trailing commas are invalid in array type fields. + if (token.Pos_IsValid(trailingComma)) { + // Trailing commas are invalid in array type fields. + p.error(trailingComma, "unexpected comma; expecting ]") + } + return [x, new ast.ArrayType({Elt: elt, Lbrack: lbrack, Len: args![0]})] + } + } + return [null, packIndexExpr(x, lbrack, args, rbrack)] + } + + public parseFieldDecl(): ast.Field | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "FieldDecl")) + }); + } + let doc = p.leadComment + let names: $.Slice = null + let typ: ast.Expr = null + switch (p.tok) { + case token.IDENT: + let name = p.parseIdent() + if (p.tok == token.PERIOD || p.tok == token.STRING || p.tok == token.SEMICOLON || p.tok == token.RBRACE) { + // embedded type + typ = name + if (p.tok == token.PERIOD) { + typ = p.parseQualifiedIdent(name) + } + } + else { + // name1, name2, ... T + names = $.arrayToSlice([name]) + for (; p.tok == token.COMMA; ) { + p.next() + names = $.append(names, p.parseIdent()) + } + // Careful dance: We don't know if we have an embedded instantiated + // type T[P1, P2, ...] or a field T of array type []E or [P]E. 
+ + // T P + if ($.len(names) == 1 && p.tok == token.LBRACK) { + ;[name, typ] = p.parseArrayFieldOrTypeInstance(name) + if (name == null) { + names = null + } + } + else { + // T P + typ = p.parseType() + } + } + break + case token.MUL: + let star = p.pos + p.next() + if (p.tok == token.LPAREN) { + // *(T) + p.error(p.pos, "cannot parenthesize embedded type") + p.next() + typ = null + // expect closing ')' but no need to complain if missing + if (p.tok == token.RPAREN) { + p.next() + } + } + else { + // *T + typ = null + } + typ = new ast.StarExpr({Star: star, X: typ}) + break + case token.LPAREN: + p.error(p.pos, "cannot parenthesize embedded type") + p.next() + if (p.tok == token.MUL) { + // (*T) + let star = p.pos + p.next() + typ = new ast.StarExpr({Star: star, X: null}) + } + else { + // (T) + typ = null + } + if (p.tok == token.RPAREN) { + p.next() + } + break + default: + let pos = p.pos + p.errorExpected(pos, "field name or embedded type") + p.advance(exprEnd) + typ = new ast.BadExpr({From: pos, To: p.pos}) + break + } + let tag: ast.BasicLit | null = null + if (p.tok == token.STRING) { + tag = new ast.BasicLit({Kind: p.tok, Value: p.lit, ValuePos: p.pos}) + p.next() + } + let comment = p.expectSemi() + let field = new ast.Field({Comment: comment, Doc: doc, Names: names, Tag: tag, Type: typ}) + return field + } + + public parseStructType(): ast.StructType | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "StructType")) + }); + } + let pos = p.expect(token.STRUCT) + let lbrace = p.expect(token.LBRACE) + let list: $.Slice = null + for (; p.tok == token.IDENT || p.tok == token.MUL || p.tok == token.LPAREN; ) { + // a field declaration cannot start with a '(' but we accept + // it here for more robust parsing and better error messages + // (parseFieldDecl will check and complain if necessary) + list = $.append(list, p.parseFieldDecl()) + } + let rbrace = p.expect(token.RBRACE) + return new ast.StructType({Fields: new ast.FieldList({Closing: rbrace, List: list, Opening: lbrace}), Struct: pos}) + } + + public parsePointerType(): ast.StarExpr | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "PointerType")) + }); + } + let star = p.expect(token.MUL) + let base = p.parseType() + return new ast.StarExpr({Star: star, X: base}) + } + + public parseDotsType(): ast.Ellipsis | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "DotsType")) + }); + } + let pos = p.expect(token.ELLIPSIS) + let elt = p.parseType() + return new ast.Ellipsis({Ellipsis: pos, Elt: elt}) + } + + public parseParamDecl(name: ast.Ident | null, typeSetsOK: boolean): field { + const p = this + using __defer = new $.DisposableStack(); + let f: field = new field() + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "ParamDecl")) + }); + } + let ptok = p.tok + if (name != null) { + p.tok = token.IDENT // force token.IDENT case in switch below + } + else if (typeSetsOK && p.tok == token.TILDE) { + // "~" ... 
+ return $.markAsStructValue(new field({})) + } + switch (p.tok) { + case token.IDENT: + if (name != null) { + f.name = name + p.tok = ptok + } + else { + f.name = p.parseIdent() + } + switch (p.tok) { + case token.IDENT: + case token.MUL: + case token.ARROW: + case token.FUNC: + case token.CHAN: + case token.MAP: + case token.STRUCT: + case token.INTERFACE: + case token.LPAREN: + f.typ = p.parseType() + break + case token.LBRACK: + { + const _tmp = p.parseArrayFieldOrTypeInstance(f.name) + f.name = _tmp[0] + f.typ = _tmp[1] + } + break + case token.ELLIPSIS: + f.typ = p.parseDotsType() + return f + break + case token.PERIOD: + f.typ = p.parseQualifiedIdent(f.name) + f.name = null + break + case token.TILDE: + if (typeSetsOK) { + f.typ = null + return f + } + break + case token.OR: + if (typeSetsOK) { + // name "|" typeset + f.typ = p.embeddedElem(f.name) + f.name = null + return f + } + break + } + break + case token.MUL: + case token.ARROW: + case token.FUNC: + case token.LBRACK: + case token.CHAN: + case token.MAP: + case token.STRUCT: + case token.INTERFACE: + case token.LPAREN: + f.typ = p.parseType() + break + case token.ELLIPSIS: + f.typ = p.parseDotsType() + return f + break + default: + p.errorExpected(p.pos, "')'") + p.advance(exprEnd) + break + } + if (typeSetsOK && p.tok == token.OR && f.typ != null) { + f.typ = p.embeddedElem(f.typ) + } + return f + } + + public parseParameterList(name0: ast.Ident | null, typ0: ast.Expr, closing: token.Token): $.Slice { + const p = this + using __defer = new $.DisposableStack(); + let params: $.Slice = null + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "ParameterList")) + }); + } + let tparams = closing == token.RBRACK + let pos0 = p.pos + if (name0 != null) { + pos0 = name0!.Pos() + } + else if (typ0 != null) { + pos0 = typ0!.Pos() + } + let list: $.Slice = null + // number of parameters that have an explicit name and type + let named: number = 0 + // number of parameters that have an explicit type + let typed: number = 0 + for (; name0 != null || p.tok != closing && p.tok != token.EOF; ) { + let par: field = new field() + if (typ0 != null) { + if (tparams) { + typ0 = p.embeddedElem(typ0) + } + par = $.markAsStructValue(new field({})) + } + else { + par = $.markAsStructValue(p.parseParamDecl(name0, tparams).clone()) + } + name0 = null // 1st name was consumed if present + typ0 = null // 1st typ was consumed if present + if (par.name != null || par.typ != null) { + list = $.append(list, par) + if (par.name != null && par.typ != null) { + named++ + } + if (par.typ != null) { + typed++ + } + } + if (!p.atComma("parameter list", closing)) { + break + } + p.next() + } + if ($.len(list) == 0) { + return params + } + if (named == 0) { + // all unnamed => found names are type names + for (let i = 0; i < $.len(list); i++) { + let par = list![i] + { + let typ = par!.name + if (typ != null) { + par!.typ = typ + par!.name = null + } + } + } + + // This is the same error handling as below, adjusted for type parameters only. + // See comment below for details. (go.dev/issue/64534) + + /* same as typed == 0 */ + // position error at closing ] + + // position at opening [ or first name + if (tparams) { + // This is the same error handling as below, adjusted for type parameters only. + // See comment below for details. 
(go.dev/issue/64534) + let errPos: token.Pos = 0 + let msg: string = "" + /* same as typed == 0 */ + // position error at closing ] + + // position at opening [ or first name + if (named == typed) { + errPos = p.pos // position error at closing ] + msg = "missing type constraint" + } + else { + errPos = pos0 // position at opening [ or first name + msg = "missing type parameter name" + if ($.len(list) == 1) { + msg += " or invalid array length" + } + } + p.error(errPos, msg) + } + } + else if (named != $.len(list)) { + // some named or we're in a type parameter list => all must be named + // left-most error position (or invalid) + let errPos: token.Pos = 0 + // current type (from right to left) + let typ: ast.Expr = null + + // correct position + + // par.typ == nil && typ == nil => we only have a par.name + for (let i = $.len(list) - 1; i >= 0; i--) { + + // correct position + + // par.typ == nil && typ == nil => we only have a par.name + { + let par = list![i] + if (par!.typ != null) { + typ = par!.typ + + // correct position + if (par!.name == null) { + errPos = typ!.Pos() + let n = ast.NewIdent("_") + n!.NamePos = errPos // correct position + par!.name = n + } + } + else if (typ != null) { + par!.typ = typ + } + else { + // par.typ == nil && typ == nil => we only have a par.name + errPos = par!.name!.Pos() + par!.typ = new ast.BadExpr({From: errPos, To: p.pos}) + } + } + } + + // Not all parameters are named because named != len(list). + // If named == typed, there must be parameters that have no types. + // They must be at the end of the parameter list, otherwise types + // would have been filled in by the right-to-left sweep above and + // there would be no error. + // If tparams is set, the parameter list is a type parameter list. + + // position error at closing token ) or ] + + // go.dev/issue/60812 + if (token.Pos_IsValid(errPos)) { + // Not all parameters are named because named != len(list). + // If named == typed, there must be parameters that have no types. + // They must be at the end of the parameter list, otherwise types + // would have been filled in by the right-to-left sweep above and + // there would be no error. + // If tparams is set, the parameter list is a type parameter list. 
+ let msg: string = "" + + // position error at closing token ) or ] + + // go.dev/issue/60812 + if (named == typed) { + errPos = p.pos // position error at closing token ) or ] + if (tparams) { + msg = "missing type constraint" + } + else { + msg = "missing parameter type" + } + } + else { + + // go.dev/issue/60812 + if (tparams) { + msg = "missing type parameter name" + // go.dev/issue/60812 + if ($.len(list) == 1) { + msg += " or invalid array length" + } + } + else { + msg = "missing parameter name" + } + } + p.error(errPos, msg) + } + } + if (named == 0) { + // parameter list consists of types only + for (let _i = 0; _i < $.len(list); _i++) { + const par = list![_i] + { + assert(par.typ != null, "nil type in unnamed parameter list") + params = $.append(params, new ast.Field({Type: par.typ})) + } + } + return params + } + let names: $.Slice = null + let typ: ast.Expr = null + let addParams = (): void => { + assert(typ != null, "nil type in named parameter list") + let field = new ast.Field({Names: names, Type: typ}) + params = $.append(params, field) + names = null + } + for (let _i = 0; _i < $.len(list); _i++) { + const par = list![_i] + { + if (par.typ != typ) { + if ($.len(names) > 0) { + addParams!() + } + typ = par.typ + } + names = $.append(names, par.name) + } + } + if ($.len(names) > 0) { + addParams!() + } + return params + } + + public parseParameters(acceptTParams: boolean): ast.FieldList | null { + const p = this + using __defer = new $.DisposableStack(); + let tparams: ast.FieldList | null = null + let params: ast.FieldList | null = null + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "Parameters")) + }); + } + if (acceptTParams && p.tok == token.LBRACK) { + let opening = p.pos + p.next() + // [T any](params) syntax + let list = p.parseParameterList(null, null, token.RBRACK) + let rbrack = p.expect(token.RBRACK) + tparams = new ast.FieldList({Closing: rbrack, List: list, Opening: opening}) + // Type parameter lists must not be empty. 
+ + // avoid follow-on errors + if (tparams!.NumFields() == 0) { + p.error(tparams!.Closing, "empty type parameter list") + tparams = null // avoid follow-on errors + } + } + let opening = p.expect(token.LPAREN) + let fields: $.Slice = null + if (p.tok != token.RPAREN) { + fields = p.parseParameterList(null, null, token.RPAREN) + } + let rparen = p.expect(token.RPAREN) + params = new ast.FieldList({Closing: rparen, List: fields, Opening: opening}) + return [tparams, params] + } + + public parseResult(): ast.FieldList | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "Result")) + }); + } + if (p.tok == token.LPAREN) { + let [, results] = p.parseParameters(false) + return results + } + let typ = p.tryIdentOrType() + if (typ != null) { + let list = $.makeSlice(1) + list![0] = new ast.Field({Type: typ}) + return new ast.FieldList({List: list}) + } + return null + } + + public parseFuncType(): ast.FuncType | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "FuncType")) + }); + } + let pos = p.expect(token.FUNC) + let [tparams, params] = p.parseParameters(true) + if (tparams != null) { + p.error(tparams!.Pos(), "function type must have no type parameters") + } + let results = p.parseResult() + return new ast.FuncType({Func: pos, Params: params, Results: results}) + } + + public parseMethodSpec(): ast.Field | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "MethodSpec")) + }); + } + let doc = p.leadComment + let idents: $.Slice = null + let typ: ast.Expr = null + let x = null + { + let { value: ident } = $.typeAssert(x, {kind: $.TypeKind.Pointer, elemType: 'ast.Ident'}) + if (ident != null) { + + // generic method or embedded instantiated type + + // generic method m[T any] + // + // Interface methods do not have type parameters. We parse them for a + // better error message and improved error recovery. + + // TODO(rfindley) refactor to share code with parseFuncType. + + // embedded instantiated type + // TODO(rfindley) should resolve all identifiers in x. + + // ordinary method + // TODO(rfindley) refactor to share code with parseFuncType. + + // embedded type + switch (true) { + case p.tok == token.LBRACK: + let lbrack = p.pos + p.next() + p.exprLev++ + let x = p.parseExpr() + p.exprLev-- + { + let { value: name0 } = $.typeAssert(x, {kind: $.TypeKind.Pointer, elemType: 'ast.Ident'}) + if (name0 != null && p.tok != token.COMMA && p.tok != token.RBRACK) { + // generic method m[T any] + // + // Interface methods do not have type parameters. We parse them for a + // better error message and improved error recovery. + /* _ = */ p.parseParameterList(name0, null, token.RBRACK) + /* _ = */ p.expect(token.RBRACK) + p.error(lbrack, "interface method must have no type parameters") + + // TODO(rfindley) refactor to share code with parseFuncType. + let [, params] = p.parseParameters(false) + let results = p.parseResult() + idents = $.arrayToSlice([ident]) + typ = new ast.FuncType({Func: token.NoPos, Params: params, Results: results}) + } + else { + // embedded instantiated type + // TODO(rfindley) should resolve all identifiers in x. 
+ let list = $.arrayToSlice([x]) + if (p.atComma("type argument list", token.RBRACK)) { + p.exprLev++ + p.next() + for (; p.tok != token.RBRACK && p.tok != token.EOF; ) { + list = $.append(list, p.parseType()) + if (!p.atComma("type argument list", token.RBRACK)) { + break + } + p.next() + } + p.exprLev-- + } + let rbrack = p.expectClosing(token.RBRACK, "type argument list") + typ = packIndexExpr(ident, lbrack, list, rbrack) + } + } + break + case p.tok == token.LPAREN: + let [, params] = p.parseParameters(false) + let results = p.parseResult() + idents = $.arrayToSlice([ident]) + typ = new ast.FuncType({Func: token.NoPos, Params: params, Results: results}) + break + default: + typ = x + break + } + } + else { + // embedded, possibly instantiated type + typ = x + + // embedded instantiated interface + if (p.tok == token.LBRACK) { + // embedded instantiated interface + typ = p.parseTypeInstance(typ) + } + } + } + return new ast.Field({Doc: doc, Names: idents, Type: typ}) + } + + public embeddedElem(x: ast.Expr): ast.Expr { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "EmbeddedElem")) + }); + } + if (x == null) { + x = p.embeddedTerm() + } + for (; p.tok == token.OR; ) { + let t = new ast.BinaryExpr() + t!.OpPos = p.pos + t!.Op = token.OR + p.next() + t!.X = x + t!.Y = p.embeddedTerm() + x = t + } + return x + } + + public embeddedTerm(): ast.Expr { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "EmbeddedTerm")) + }); + } + if (p.tok == token.TILDE) { + let t = new ast.UnaryExpr() + t!.OpPos = p.pos + t!.Op = token.TILDE + p.next() + t!.X = p.parseType() + return t + } + let t = p.tryIdentOrType() + if (t == null) { + let pos = p.pos + p.errorExpected(pos, "~ term or type") + p.advance(exprEnd) + return new ast.BadExpr({From: pos, To: p.pos}) + } + return t + } + + public parseInterfaceType(): ast.InterfaceType | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "InterfaceType")) + }); + } + let pos = p.expect(token.INTERFACE) + let lbrace = p.expect(token.LBRACE) + let list: $.Slice = null + parseElements: for (; ; ) { + switch (true) { + case p.tok == token.IDENT: + let f = p.parseMethodSpec() + if (f!.Names == null) { + f!.Type = p.embeddedElem(f!.Type) + } + f!.Comment = p.expectSemi() + list = $.append(list, f) + break + case p.tok == token.TILDE: + let typ = null + let comment = p.expectSemi() + list = $.append(list, new ast.Field({Comment: comment, Type: typ})) + break + default: + { + let t = p.tryIdentOrType() + if (t != null) { + let typ = p.embeddedElem(t) + let comment = p.expectSemi() + list = $.append(list, new ast.Field({Comment: comment, Type: typ})) + } + else { + break + } + } + break + } + } + let rbrace = p.expect(token.RBRACE) + return new ast.InterfaceType({Interface: pos, Methods: new ast.FieldList({Closing: rbrace, List: list, Opening: lbrace})}) + } + + public parseMapType(): ast.MapType | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "MapType")) + }); + } + let pos = p.expect(token.MAP) + p.expect(token.LBRACK) + let key = p.parseType() + p.expect(token.RBRACK) + let value = p.parseType() + return new 
ast.MapType({Key: key, Map: pos, Value: value}) + } + + public parseChanType(): ast.ChanType | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "ChanType")) + }); + } + let pos = p.pos + let dir = (ast.SEND | ast.RECV) + let arrow: token.Pos = 0 + if (p.tok == token.CHAN) { + p.next() + if (p.tok == token.ARROW) { + arrow = p.pos + p.next() + dir = ast.SEND + } + } + else { + arrow = p.expect(token.ARROW) + p.expect(token.CHAN) + dir = ast.RECV + } + let value = p.parseType() + return new ast.ChanType({Arrow: arrow, Begin: pos, Dir: dir, Value: value}) + } + + public parseTypeInstance(typ: ast.Expr): ast.Expr { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "TypeInstance")) + }); + } + let opening = p.expect(token.LBRACK) + p.exprLev++ + let list: $.Slice = null + for (; p.tok != token.RBRACK && p.tok != token.EOF; ) { + list = $.append(list, p.parseType()) + if (!p.atComma("type argument list", token.RBRACK)) { + break + } + p.next() + } + p.exprLev-- + let closing = p.expectClosing(token.RBRACK, "type argument list") + if ($.len(list) == 0) { + p.errorExpected(closing, "type argument list") + return new ast.IndexExpr({Index: new ast.BadExpr({From: opening + 1, To: closing}), Lbrack: opening, Rbrack: closing, X: typ}) + } + return packIndexExpr(typ, opening, list, closing) + } + + public tryIdentOrType(): ast.Expr { + const p = this + using __defer = new $.DisposableStack(); + __defer.defer(() => { + decNestLev(incNestLev(p)) + }); + switch (p.tok) { + case token.IDENT: + let typ = null + if (p.tok == token.LBRACK) { + typ = p.parseTypeInstance(typ) + } + return typ + break + case token.LBRACK: + let lbrack = p.expect(token.LBRACK) + return p.parseArrayType(lbrack, null) + break + case token.STRUCT: + return p.parseStructType() + break + case token.MUL: + return p.parsePointerType() + break + case token.FUNC: + return p.parseFuncType() + break + case token.INTERFACE: + return p.parseInterfaceType() + break + case token.MAP: + return p.parseMapType() + break + case token.CHAN: + case token.ARROW: + return p.parseChanType() + break + case token.LPAREN: + let lparen = p.pos + p.next() + let typ = p.parseType() + let rparen = p.expect(token.RPAREN) + return new ast.ParenExpr({Lparen: lparen, Rparen: rparen, X: typ}) + break + } + return null + } + + public parseStmtList(): $.Slice { + const p = this + using __defer = new $.DisposableStack(); + let list: $.Slice = null + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "StatementList")) + }); + } + for (; p.tok != token.CASE && p.tok != token.DEFAULT && p.tok != token.RBRACE && p.tok != token.EOF; ) { + list = $.append(list, p.parseStmt()) + } + return list + } + + public parseBody(): ast.BlockStmt | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "Body")) + }); + } + let lbrace = p.expect(token.LBRACE) + let list = p.parseStmtList() + let rbrace = p.expect2(token.RBRACE) + return new ast.BlockStmt({Lbrace: lbrace, List: list, Rbrace: rbrace}) + } + + public parseBlockStmt(): ast.BlockStmt | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, 
"BlockStmt")) + }); + } + let lbrace = p.expect(token.LBRACE) + let list = p.parseStmtList() + let rbrace = p.expect2(token.RBRACE) + return new ast.BlockStmt({Lbrace: lbrace, List: list, Rbrace: rbrace}) + } + + public parseFuncTypeOrLit(): ast.Expr { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "FuncTypeOrLit")) + }); + } + let typ = p.parseFuncType() + if (p.tok != token.LBRACE) { + // function type only + return typ + } + p.exprLev++ + let body = p.parseBody() + p.exprLev-- + return new ast.FuncLit({Body: body, Type: typ}) + } + + // parseOperand may return an expression or a raw type (incl. array + // types of the form [...]T). Callers must verify the result. + public parseOperand(): ast.Expr { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "Operand")) + }); + } + switch (p.tok) { + case token.IDENT: + let x = p.parseIdent() + return x + break + case token.INT: + case token.FLOAT: + case token.IMAG: + case token.CHAR: + case token.STRING: + let x = new ast.BasicLit({Kind: p.tok, Value: p.lit, ValuePos: p.pos}) + p.next() + return x + break + case token.LPAREN: + let lparen = p.pos + p.next() + p.exprLev++ + let x = p.parseRhs() // types may be parenthesized: (some type) + p.exprLev-- + let rparen = p.expect(token.RPAREN) + return new ast.ParenExpr({Lparen: lparen, Rparen: rparen, X: x}) + break + case token.FUNC: + return p.parseFuncTypeOrLit() + break + } + { + let typ = p.tryIdentOrType() + if (typ != null) { + // do not consume trailing type parameters + // could be type for composite literal or conversion + let { ok: isIdent } = $.typeAssert(typ, {kind: $.TypeKind.Pointer, elemType: 'ast.Ident'}) + assert(!isIdent, "type cannot be identifier") + return typ + } + } + let pos = p.pos + p.errorExpected(pos, "operand") + p.advance(stmtStart) + return new ast.BadExpr({From: pos, To: p.pos}) + } + + public parseSelector(x: ast.Expr): ast.Expr { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "Selector")) + }); + } + let sel = p.parseIdent() + return new ast.SelectorExpr({Sel: sel, X: x}) + } + + public parseTypeAssertion(x: ast.Expr): ast.Expr { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "TypeAssertion")) + }); + } + let lparen = p.expect(token.LPAREN) + let typ: ast.Expr = null + if (p.tok == token.TYPE) { + // type switch: typ == nil + p.next() + } + else { + typ = p.parseType() + } + let rparen = p.expect(token.RPAREN) + return new ast.TypeAssertExpr({Lparen: lparen, Rparen: rparen, Type: typ, X: x}) + } + + public parseIndexOrSliceOrInstance(x: ast.Expr): ast.Expr { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "parseIndexOrSliceOrInstance")) + }); + } + let lbrack = p.expect(token.LBRACK) + if (p.tok == token.RBRACK) { + // empty index, slice or index expressions are not permitted; + // accept them for parsing tolerance, but complain + p.errorExpected(p.pos, "operand") + let rbrack = p.pos + p.next() + return new ast.IndexExpr({Index: new ast.BadExpr({From: rbrack, To: rbrack}), Lbrack: lbrack, Rbrack: rbrack, X: x}) + } + 
p.exprLev++ + // change the 3 to 2 to disable 3-index slices + let N: number = 3 + let args: $.Slice = null + let index: ast.Expr[] = [null, null, null] + let colons: token.Pos[] = [0, 0] + if (p.tok != token.COLON) { + // We can't know if we have an index expression or a type instantiation; + // so even if we see a (named) type we are not going to be in type context. + index![0] = p.parseRhs() + } + let ncolons = 0 + switch (p.tok) { + case token.COLON: + for (; p.tok == token.COLON && ncolons < $.len(colons); ) { + colons![ncolons] = p.pos + ncolons++ + p.next() + if (p.tok != token.COLON && p.tok != token.RBRACK && p.tok != token.EOF) { + index![ncolons] = p.parseRhs() + } + } + break + case token.COMMA: + args = $.append(args, index![0]) + for (; p.tok == token.COMMA; ) { + p.next() + if (p.tok != token.RBRACK && p.tok != token.EOF) { + args = $.append(args, p.parseType()) + } + } + break + } + p.exprLev-- + let rbrack = p.expect(token.RBRACK) + if (ncolons > 0) { + // slice expression + let slice3 = false + + // Check presence of middle and final index here rather than during type-checking + // to prevent erroneous programs from passing through gofmt (was go.dev/issue/7305). + if (ncolons == 2) { + slice3 = true + // Check presence of middle and final index here rather than during type-checking + // to prevent erroneous programs from passing through gofmt (was go.dev/issue/7305). + if (index![1] == null) { + p.error(colons![0], "middle index required in 3-index slice") + index![1] = new ast.BadExpr({From: colons![0] + 1, To: colons![1]}) + } + if (index![2] == null) { + p.error(colons![1], "final index required in 3-index slice") + index![2] = new ast.BadExpr({From: colons![1] + 1, To: rbrack}) + } + } + return new ast.SliceExpr({High: index![1], Lbrack: lbrack, Low: index![0], Max: index![2], Rbrack: rbrack, Slice3: slice3, X: x}) + } + if ($.len(args) == 0) { + // index expression + return new ast.IndexExpr({Index: index![0], Lbrack: lbrack, Rbrack: rbrack, X: x}) + } + return packIndexExpr(x, lbrack, args, rbrack) + } + + public parseCallOrConversion(fun: ast.Expr): ast.CallExpr | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "CallOrConversion")) + }); + } + let lparen = p.expect(token.LPAREN) + p.exprLev++ + let list: $.Slice = null + let ellipsis: token.Pos = 0 + for (; p.tok != token.RPAREN && p.tok != token.EOF && !token.Pos_IsValid(ellipsis); ) { + list = $.append(list, p.parseRhs()) // builtins may expect a type: make(some type, ...) 
+ if (p.tok == token.ELLIPSIS) { + ellipsis = p.pos + p.next() + } + if (!p.atComma("argument list", token.RPAREN)) { + break + } + p.next() + } + p.exprLev-- + let rparen = p.expectClosing(token.RPAREN, "argument list") + return new ast.CallExpr({Args: list, Ellipsis: ellipsis, Fun: fun, Lparen: lparen, Rparen: rparen}) + } + + public parseValue(): ast.Expr { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "Element")) + }); + } + if (p.tok == token.LBRACE) { + return null + } + let x = p.parseExpr() + return x + } + + public parseElement(): ast.Expr { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "Element")) + }); + } + let x = p.parseValue() + if (p.tok == token.COLON) { + let colon = p.pos + p.next() + x = new ast.KeyValueExpr({Colon: colon, Key: x, Value: p.parseValue()}) + } + return x + } + + public parseElementList(): $.Slice { + const p = this + using __defer = new $.DisposableStack(); + let list: $.Slice = null + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "ElementList")) + }); + } + for (; p.tok != token.RBRACE && p.tok != token.EOF; ) { + list = $.append(list, p.parseElement()) + if (!p.atComma("composite literal", token.RBRACE)) { + break + } + p.next() + } + return list + } + + public parseLiteralValue(typ: ast.Expr): ast.Expr { + const p = this + using __defer = new $.DisposableStack(); + __defer.defer(() => { + decNestLev(incNestLev(p)) + }); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "LiteralValue")) + }); + } + let lbrace = p.expect(token.LBRACE) + let elts: $.Slice = null + p.exprLev++ + if (p.tok != token.RBRACE) { + elts = p.parseElementList() + } + p.exprLev-- + let rbrace = p.expectClosing(token.RBRACE, "composite literal") + return new ast.CompositeLit({Elts: elts, Lbrace: lbrace, Rbrace: rbrace, Type: typ}) + } + + public parsePrimaryExpr(x: ast.Expr): ast.Expr { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "PrimaryExpr")) + }); + } + if (x == null) { + x = p.parseOperand() + } + let n: number = 0 + __defer.defer(() => { + p.nestLev -= n + }); + for (n = 1; ; n++) { + incNestLev(p) + + // TODO(rFindley) The check for token.RBRACE below is a targeted fix + // to error recovery sufficient to make the x/tools tests to + // pass with the new parsing logic introduced for type + // parameters. Remove this once error recovery has been + // more generally reconsidered. 
+ + // make progress + + // operand may have returned a parenthesized complit + // type; accept it but complain if we have a complit + + // determine if '{' belongs to a composite literal or a block statement + + // x is possibly a composite literal type + + // x is possibly a composite literal type + + // x is a composite literal type + + // already progressed, no need to advance + switch (p.tok) { + case token.PERIOD: + p.next() + switch (p.tok) { + case token.IDENT: + x = p.parseSelector(x) + break + case token.LPAREN: + x = p.parseTypeAssertion(x) + break + default: + let pos = p.pos + p.errorExpected(pos, "selector or type assertion") + if (p.tok != token.RBRACE) { + p.next() // make progress + } + let sel = new ast.Ident({Name: "_", NamePos: pos}) + x = new ast.SelectorExpr({Sel: sel, X: x}) + break + } + break + case token.LBRACK: + x = p.parseIndexOrSliceOrInstance(x) + break + case token.LPAREN: + x = p.parseCallOrConversion(x) + break + case token.LBRACE: + let t = ast.Unparen(x) + $.typeSwitch(t, [{ types: [{kind: $.TypeKind.Pointer, elemType: 'ast.BadExpr'}, {kind: $.TypeKind.Pointer, elemType: 'ast.Ident'}, {kind: $.TypeKind.Pointer, elemType: 'ast.SelectorExpr'}], body: () => { + if (p.exprLev < 0) { + return x + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.IndexExpr'}, {kind: $.TypeKind.Pointer, elemType: 'ast.IndexListExpr'}], body: () => { + if (p.exprLev < 0) { + return x + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.ArrayType'}, {kind: $.TypeKind.Pointer, elemType: 'ast.StructType'}, {kind: $.TypeKind.Pointer, elemType: 'ast.MapType'}], body: () => {}}], () => { + return x + }) + if (t != x) { + + // already progressed, no need to advance + p.error(t!.Pos(), "cannot parenthesize type in composite literal") + // already progressed, no need to advance + } + x = p.parseLiteralValue(x) + break + default: + return x + break + } + } + } + + public parseUnaryExpr(): ast.Expr { + const p = this + using __defer = new $.DisposableStack(); + __defer.defer(() => { + decNestLev(incNestLev(p)) + }); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "UnaryExpr")) + }); + } + switch (p.tok) { + case token.ADD: + case token.SUB: + case token.NOT: + case token.XOR: + case token.AND: + case token.TILDE: + let [pos, op] = [p.pos, p.tok] + p.next() + let x = p.parseUnaryExpr() + return new ast.UnaryExpr({Op: op, OpPos: pos, X: x}) + break + case token.ARROW: + let arrow = p.pos + p.next() + let x = p.parseUnaryExpr() + { + let { value: typ, ok: ok } = $.typeAssert(x, {kind: $.TypeKind.Pointer, elemType: 'ast.ChanType'}) + if (ok) { + // (<-type) + + // re-associate position info and <- + let dir = ast.SEND + + // error: (<-type) is (<-(<-chan T)) + for (; ok && dir == ast.SEND; ) { + + // error: (<-type) is (<-(<-chan T)) + if (typ!.Dir == ast.RECV) { + // error: (<-type) is (<-(<-chan T)) + p.errorExpected(typ!.Arrow, "'chan'") + } + ;[arrow, typ!.Begin, typ!.Arrow] = [typ!.Arrow, arrow, arrow] + ;[dir, typ!.Dir] = [typ!.Dir, ast.RECV] + ({ value: typ, ok: ok } = $.typeAssert(typ!.Value, {kind: $.TypeKind.Pointer, elemType: 'ast.ChanType'})) + } + if (dir == ast.SEND) { + p.errorExpected(arrow, "channel type") + } + + return x + } + } + return new ast.UnaryExpr({Op: token.ARROW, OpPos: arrow, X: x}) + break + case token.MUL: + let pos = p.pos + p.next() + let x = p.parseUnaryExpr() + return new ast.StarExpr({Star: pos, X: x}) + break + } + return null + } + + public tokPrec(): [token.Token, number] { + const 
p = this + let tok = p.tok + if (p.inRhs && tok == token.ASSIGN) { + tok = token.EQL + } + return [tok, token.Token_Precedence(tok)] + } + + // parseBinaryExpr parses a (possibly) binary expression. + // If x is non-nil, it is used as the left operand. + // + // TODO(rfindley): parseBinaryExpr has become overloaded. Consider refactoring. + public parseBinaryExpr(x: ast.Expr, prec1: number): ast.Expr { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "BinaryExpr")) + }); + } + if (x == null) { + x = p.parseUnaryExpr() + } + let n: number = 0 + __defer.defer(() => { + p.nestLev -= n + }); + for (n = 1; ; n++) { + incNestLev(p) + let [op, oprec] = p.tokPrec() + if (oprec < prec1) { + return x + } + let pos = p.expect(op) + let y = p.parseBinaryExpr(null, oprec + 1) + x = new ast.BinaryExpr({Op: op, OpPos: pos, X: x, Y: y}) + } + } + + // The result may be a type or even a raw type ([...]int). + public parseExpr(): ast.Expr { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "Expression")) + }); + } + return p.parseBinaryExpr(null, token.LowestPrec + 1) + } + + public parseRhs(): ast.Expr { + const p = this + let old = p.inRhs + p.inRhs = true + let x = p.parseExpr() + p.inRhs = old + return x + } + + // parseSimpleStmt returns true as 2nd result if it parsed the assignment + // of a range clause (with mode == rangeOk). The returned statement is an + // assignment with a right-hand side that is a single unary expression of + // the form "range x". No guarantees are given for the left-hand side. + public parseSimpleStmt(mode: number): [ast.Stmt, boolean] { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "SimpleStmt")) + }); + } + let x = p.parseList(false) + switch (p.tok) { + case token.DEFINE: + case token.ASSIGN: + case token.ADD_ASSIGN: + case token.SUB_ASSIGN: + case token.MUL_ASSIGN: + case token.QUO_ASSIGN: + case token.REM_ASSIGN: + case token.AND_ASSIGN: + case token.OR_ASSIGN: + case token.XOR_ASSIGN: + case token.SHL_ASSIGN: + case token.SHR_ASSIGN: + case token.AND_NOT_ASSIGN: + let [pos, tok] = [p.pos, p.tok] + p.next() + let y: $.Slice = null + let isRange = false + if (mode == 2 && p.tok == token.RANGE && (tok == token.DEFINE || tok == token.ASSIGN)) { + let pos = p.pos + p.next() + y = $.arrayToSlice([new ast.UnaryExpr({Op: token.RANGE, OpPos: pos, X: p.parseRhs()})]) + isRange = true + } + else { + y = p.parseList(true) + } + return [new ast.AssignStmt({Lhs: x, Rhs: y, Tok: tok, TokPos: pos}), isRange] + break + } + if ($.len(x) > 1) { + + // continue with first expression + p.errorExpected(x![0]!.Pos(), "1 expression") + // continue with first expression + } + switch (p.tok) { + case token.COLON: + let colon = p.pos + p.next() + { + let { value: label, ok: isIdent } = $.typeAssert(x![0], {kind: $.TypeKind.Pointer, elemType: 'ast.Ident'}) + if (mode == 1 && isIdent) { + // Go spec: The scope of a label is the body of the function + // in which it is declared and excludes the body of any nested + // function. 
+ let stmt = new ast.LabeledStmt({Colon: colon, Label: label, Stmt: p.parseStmt()}) + return [stmt, false] + } + } + p.error(colon, "illegal label declaration") + return [new ast.BadStmt({From: x![0]!.Pos(), To: colon + 1}), false] + break + case token.ARROW: + let arrow = p.pos + p.next() + let y = p.parseRhs() + return [new ast.SendStmt({Arrow: arrow, Chan: x![0], Value: y}), false] + break + case token.INC: + case token.DEC: + let s = new ast.IncDecStmt({Tok: p.tok, TokPos: p.pos, X: x![0]}) + p.next() + return [s, false] + break + } + return [new ast.ExprStmt({X: x![0]}), false] + } + + public parseCallExpr(callType: string): ast.CallExpr | null { + const p = this + let x = p.parseRhs() // could be a conversion: (some type)(x) + { + let t = ast.Unparen(x) + if (t != x) { + p.error(x!.Pos(), fmt.Sprintf("expression in %s must not be parenthesized", callType)) + x = t + } + } + { + let { value: call, ok: isCall } = $.typeAssert(x, {kind: $.TypeKind.Pointer, elemType: 'ast.CallExpr'}) + if (isCall) { + return call + } + } + { + let { ok: isBad } = $.typeAssert(x, {kind: $.TypeKind.Pointer, elemType: 'ast.BadExpr'}) + if (!isBad) { + // only report error if it's a new one + p.error(p.safePos(x!.End()), fmt.Sprintf("expression in %s must be function call", callType)) + } + } + return null + } + + public parseGoStmt(): ast.Stmt { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "GoStmt")) + }); + } + let pos = p.expect(token.GO) + let call = p.parseCallExpr("go") + p.expectSemi() + if (call == null) { + return new ast.BadStmt({From: pos, To: pos + 2}) + } + return new ast.GoStmt({Call: call, Go: pos}) + } + + public parseDeferStmt(): ast.Stmt { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "DeferStmt")) + }); + } + let pos = p.expect(token.DEFER) + let call = p.parseCallExpr("defer") + p.expectSemi() + if (call == null) { + return new ast.BadStmt({From: pos, To: pos + 5}) + } + return new ast.DeferStmt({Call: call, Defer: pos}) + } + + public parseReturnStmt(): ast.ReturnStmt | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "ReturnStmt")) + }); + } + let pos = p.pos + p.expect(token.RETURN) + let x: $.Slice = null + if (p.tok != token.SEMICOLON && p.tok != token.RBRACE) { + x = p.parseList(true) + } + p.expectSemi() + return new ast.ReturnStmt({Results: x, Return: pos}) + } + + public parseBranchStmt(tok: token.Token): ast.BranchStmt | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "BranchStmt")) + }); + } + let pos = p.expect(tok) + let label: ast.Ident | null = null + if (tok != token.FALLTHROUGH && p.tok == token.IDENT) { + label = p.parseIdent() + } + p.expectSemi() + return new ast.BranchStmt({Label: label, Tok: tok, TokPos: pos}) + } + + public makeExpr(s: ast.Stmt, want: string): ast.Expr { + const p = this + if (s == null) { + return null + } + { + let { value: es, ok: isExpr } = $.typeAssert(s, {kind: $.TypeKind.Pointer, elemType: 'ast.ExprStmt'}) + if (isExpr) { + return es!.X + } + } + let found = "simple statement" + { + let { ok: isAss } = $.typeAssert(s, {kind: $.TypeKind.Pointer, elemType: 'ast.AssignStmt'}) + if (isAss) { + 
found = "assignment" + } + } + p.error(s!.Pos(), fmt.Sprintf("expected %s, found %s (missing parentheses around composite literal?)", want, found)) + return new ast.BadExpr({From: s!.Pos(), To: p.safePos(s!.End())}) + } + + // parseIfHeader is an adjusted version of parser.header + // in cmd/compile/internal/syntax/parser.go, which has + // been tuned for better error handling. + public parseIfHeader(): [ast.Stmt, ast.Expr] { + const p = this + let init: ast.Stmt = null + let cond: ast.Expr = null + if (p.tok == token.LBRACE) { + p.error(p.pos, "missing condition in if statement") + cond = new ast.BadExpr({From: p.pos, To: p.pos}) + return [init, cond] + } + let prevLev = p.exprLev + p.exprLev = -1 + if (p.tok != token.SEMICOLON) { + // accept potential variable declaration but complain + if (p.tok == token.VAR) { + p.next() + p.error(p.pos, "var declaration not allowed in if initializer") + } + ;[init] = p.parseSimpleStmt(0) + } + let condStmt: ast.Stmt = null + let semi: { pos?: token.Pos; lit?: string } = {} + if (p.tok != token.LBRACE) { + if (p.tok == token.SEMICOLON) { + semi.pos = p.pos + semi.lit = p.lit + p.next() + } + else { + p.expect(token.SEMICOLON) + } + if (p.tok != token.LBRACE) { + ;[condStmt] = p.parseSimpleStmt(0) + } + } + else { + condStmt = init + init = null + } + if (condStmt != null) { + cond = p.makeExpr(condStmt, "boolean expression") + } + else if (token.Pos_IsValid(semi.pos)) { + if (semi.lit == "\n") { + p.error(semi.pos, "unexpected newline, expecting { after if clause") + } + else { + p.error(semi.pos, "missing condition in if statement") + } + } + if (cond == null) { + cond = new ast.BadExpr({From: p.pos, To: p.pos}) + } + p.exprLev = prevLev + return [init, cond] + } + + public parseIfStmt(): ast.IfStmt | null { + const p = this + using __defer = new $.DisposableStack(); + __defer.defer(() => { + decNestLev(incNestLev(p)) + }); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "IfStmt")) + }); + } + let pos = p.expect(token.IF) + let [init, cond] = p.parseIfHeader() + let body = p.parseBlockStmt() + let else_: ast.Stmt = null + if (p.tok == token.ELSE) { + p.next() + switch (p.tok) { + case token.IF: + else_ = p.parseIfStmt() + break + case token.LBRACE: + else_ = p.parseBlockStmt() + p.expectSemi() + break + default: + p.errorExpected(p.pos, "if statement or block") + else_ = new ast.BadStmt({From: p.pos, To: p.pos}) + break + } + } + else { + p.expectSemi() + } + return new ast.IfStmt({Body: body, Cond: cond, Else: else_, If: pos, Init: init}) + } + + public parseCaseClause(): ast.CaseClause | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "CaseClause")) + }); + } + let pos = p.pos + let list: $.Slice = null + if (p.tok == token.CASE) { + p.next() + list = p.parseList(true) + } + else { + p.expect(token.DEFAULT) + } + let colon = p.expect(token.COLON) + let body = p.parseStmtList() + return new ast.CaseClause({Body: body, Case: pos, Colon: colon, List: list}) + } + + public isTypeSwitchGuard(s: ast.Stmt): boolean { + const p = this + $.typeSwitch(s, [{ types: [{kind: $.TypeKind.Pointer, elemType: 'ast.ExprStmt'}], body: (t) => { + return isTypeSwitchAssert(t!.X) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.AssignStmt'}], body: (t) => { + if ($.len(t!.Lhs) == 1 && $.len(t!.Rhs) == 1 && isTypeSwitchAssert(t!.Rhs![0])) { + + // permit v = x.(type) but complain + switch (t!.Tok) 
{ + case token.ASSIGN: + p.error(t!.TokPos, "expected ':=', found '='") + // fallthrough // fallthrough statement skipped + break + case token.DEFINE: + return true + break + } + } + }}]) + return false + } + + public parseSwitchStmt(): ast.Stmt { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "SwitchStmt")) + }); + } + let pos = p.expect(token.SWITCH) + let s1: ast.Stmt = null + let s2: ast.Stmt = null + if (p.tok != token.LBRACE) { + let prevLev = p.exprLev + p.exprLev = -1 + if (p.tok != token.SEMICOLON) { + ;[s2] = p.parseSimpleStmt(0) + } + + // A TypeSwitchGuard may declare a variable in addition + // to the variable declared in the initial SimpleStmt. + // Introduce extra scope to avoid redeclaration errors: + // + // switch t := 0; t := x.(T) { ... } + // + // (this code is not valid Go because the first t + // cannot be accessed and thus is never used, the extra + // scope is needed for the correct error message). + // + // If we don't have a type switch, s2 must be an expression. + // Having the extra nested but empty scope won't affect it. + if (p.tok == token.SEMICOLON) { + p.next() + s1 = s2 + s2 = null + + // A TypeSwitchGuard may declare a variable in addition + // to the variable declared in the initial SimpleStmt. + // Introduce extra scope to avoid redeclaration errors: + // + // switch t := 0; t := x.(T) { ... } + // + // (this code is not valid Go because the first t + // cannot be accessed and thus is never used, the extra + // scope is needed for the correct error message). + // + // If we don't have a type switch, s2 must be an expression. + // Having the extra nested but empty scope won't affect it. + if (p.tok != token.LBRACE) { + // A TypeSwitchGuard may declare a variable in addition + // to the variable declared in the initial SimpleStmt. + // Introduce extra scope to avoid redeclaration errors: + // + // switch t := 0; t := x.(T) { ... } + // + // (this code is not valid Go because the first t + // cannot be accessed and thus is never used, the extra + // scope is needed for the correct error message). + // + // If we don't have a type switch, s2 must be an expression. + // Having the extra nested but empty scope won't affect it. 
+ ;[s2] = p.parseSimpleStmt(0) + } + } + p.exprLev = prevLev + } + let typeSwitch = p.isTypeSwitchGuard(s2) + let lbrace = p.expect(token.LBRACE) + let list: $.Slice = null + for (; p.tok == token.CASE || p.tok == token.DEFAULT; ) { + list = $.append(list, p.parseCaseClause()) + } + let rbrace = p.expect(token.RBRACE) + p.expectSemi() + let body = new ast.BlockStmt({Lbrace: lbrace, List: list, Rbrace: rbrace}) + if (typeSwitch) { + return new ast.TypeSwitchStmt({Assign: s2, Body: body, Init: s1, Switch: pos}) + } + return new ast.SwitchStmt({Body: body, Init: s1, Switch: pos, Tag: p.makeExpr(s2, "switch expression")}) + } + + public parseCommClause(): ast.CommClause | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "CommClause")) + }); + } + let pos = p.pos + let comm: ast.Stmt = null + if (p.tok == token.CASE) { + p.next() + let lhs = p.parseList(false) + + // SendStmt + + // continue with first expression + + // RecvStmt + + // RecvStmt with assignment + + // continue with first two expressions + + // lhs must be single receive operation + + // continue with first expression + if (p.tok == token.ARROW) { + // SendStmt + + // continue with first expression + if ($.len(lhs) > 1) { + + // continue with first expression + p.errorExpected(lhs![0]!.Pos(), "1 expression") + // continue with first expression + } + let arrow = p.pos + p.next() + let rhs = p.parseRhs() + comm = new ast.SendStmt({Arrow: arrow, Chan: lhs![0], Value: rhs}) + } + else { + // RecvStmt + + // RecvStmt with assignment + + // continue with first two expressions + + // lhs must be single receive operation + + // continue with first expression + { + let tok = p.tok + if (tok == token.ASSIGN || tok == token.DEFINE) { + // RecvStmt with assignment + + // continue with first two expressions + if ($.len(lhs) > 2) { + p.errorExpected(lhs![0]!.Pos(), "1 or 2 expressions") + // continue with first two expressions + lhs = $.goSlice(lhs, 0, 2) + } + let pos = p.pos + p.next() + let rhs = p.parseRhs() + comm = new ast.AssignStmt({Lhs: lhs, Rhs: $.arrayToSlice([rhs]), Tok: tok, TokPos: pos}) + } + else { + // lhs must be single receive operation + + // continue with first expression + if ($.len(lhs) > 1) { + + // continue with first expression + p.errorExpected(lhs![0]!.Pos(), "1 expression") + // continue with first expression + } + comm = new ast.ExprStmt({X: lhs![0]}) + } + } + } + } + else { + p.expect(token.DEFAULT) + } + let colon = p.expect(token.COLON) + let body = p.parseStmtList() + return new ast.CommClause({Body: body, Case: pos, Colon: colon, Comm: comm}) + } + + public parseSelectStmt(): ast.SelectStmt | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "SelectStmt")) + }); + } + let pos = p.expect(token.SELECT) + let lbrace = p.expect(token.LBRACE) + let list: $.Slice = null + for (; p.tok == token.CASE || p.tok == token.DEFAULT; ) { + list = $.append(list, p.parseCommClause()) + } + let rbrace = p.expect(token.RBRACE) + p.expectSemi() + let body = new ast.BlockStmt({Lbrace: lbrace, List: list, Rbrace: rbrace}) + return new ast.SelectStmt({Body: body, Select: pos}) + } + + public parseForStmt(): ast.Stmt { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "ForStmt")) + }); + } + let pos 
= p.expect(token.FOR) + let s1: ast.Stmt = null + let s2: ast.Stmt = null + let s3: ast.Stmt = null + let isRange: boolean = false + if (p.tok != token.LBRACE) { + let prevLev = p.exprLev + p.exprLev = -1 + + // "for range x" (nil lhs in assignment) + if (p.tok != token.SEMICOLON) { + + // "for range x" (nil lhs in assignment) + if (p.tok == token.RANGE) { + // "for range x" (nil lhs in assignment) + let pos = p.pos + p.next() + let y = $.arrayToSlice([new ast.UnaryExpr({Op: token.RANGE, OpPos: pos, X: p.parseRhs()})]) + s2 = new ast.AssignStmt({Rhs: y}) + isRange = true + } + else { + ;[s2, isRange] = p.parseSimpleStmt(2) + } + } + if (!isRange && p.tok == token.SEMICOLON) { + p.next() + s1 = s2 + s2 = null + if (p.tok != token.SEMICOLON) { + ;[s2] = p.parseSimpleStmt(0) + } + p.expectSemi() + if (p.tok != token.LBRACE) { + ;[s3] = p.parseSimpleStmt(0) + } + } + p.exprLev = prevLev + } + let body = p.parseBlockStmt() + p.expectSemi() + if (isRange) { + let _as = $.mustTypeAssert(s2, {kind: $.TypeKind.Pointer, elemType: 'ast.AssignStmt'}) + // check lhs + let key: ast.Expr = null + let value: ast.Expr = null + + // nothing to do + switch ($.len(_as!.Lhs)) { + case 0: + break + case 1: + key = _as!.Lhs![0] + break + case 2: + ;[key, value] = [_as!.Lhs![0], _as!.Lhs![1]] + break + default: + p.errorExpected(_as!.Lhs![$.len(_as!.Lhs) - 1]!.Pos(), "at most 2 expressions") + return new ast.BadStmt({From: pos, To: p.safePos(body!.End())}) + break + } + // parseSimpleStmt returned a right-hand side that + // is a single unary expression of the form "range x" + let x = $.mustTypeAssert(_as!.Rhs![0], {kind: $.TypeKind.Pointer, elemType: 'ast.UnaryExpr'})!.X + return new ast.RangeStmt({Body: body, For: pos, Key: key, Range: _as!.Rhs![0]!.Pos(), Tok: _as!.Tok, TokPos: _as!.TokPos, Value: value, X: x}) + } + return new ast.ForStmt({Body: body, Cond: p.makeExpr(s2, "boolean or range expression"), For: pos, Init: s1, Post: s3}) + } + + public parseStmt(): ast.Stmt { + const p = this + using __defer = new $.DisposableStack(); + let s: ast.Stmt = null + __defer.defer(() => { + decNestLev(incNestLev(p)) + }); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "Statement")) + }); + } + switch (p.tok) { + case token.CONST: + case token.TYPE: + case token.VAR: + s = new ast.DeclStmt({Decl: p.parseDecl(stmtStart)}) + break + case token.IDENT: + case token.INT: + case token.FLOAT: + case token.IMAG: + case token.CHAR: + case token.STRING: + case token.FUNC: + case token.LPAREN: + case token.LBRACK: + case token.STRUCT: + case token.MAP: + case token.CHAN: + case token.INTERFACE: + case token.ADD: + case token.SUB: + case token.MUL: + case token.AND: + case token.XOR: + case token.ARROW: + case token.NOT: + ;[s] = p.parseSimpleStmt(1) + { + let { ok: isLabeledStmt } = $.typeAssert(s, {kind: $.TypeKind.Pointer, elemType: 'ast.LabeledStmt'}) + if (!isLabeledStmt) { + p.expectSemi() + } + } + break + case token.GO: + s = p.parseGoStmt() + break + case token.DEFER: + s = p.parseDeferStmt() + break + case token.RETURN: + s = p.parseReturnStmt() + break + case token.BREAK: + case token.CONTINUE: + case token.GOTO: + case token.FALLTHROUGH: + s = p.parseBranchStmt(p.tok) + break + case token.LBRACE: + s = p.parseBlockStmt() + p.expectSemi() + break + case token.IF: + s = p.parseIfStmt() + break + case token.SWITCH: + s = p.parseSwitchStmt() + break + case token.SELECT: + s = p.parseSelectStmt() + break + case token.FOR: + s = p.parseForStmt() + break + case 
token.SEMICOLON: + s = new ast.EmptyStmt({Implicit: p.lit == "\n", Semicolon: p.pos}) + p.next() + break + case token.RBRACE: + s = new ast.EmptyStmt({Implicit: true, Semicolon: p.pos}) + break + default: + let pos = p.pos + p.errorExpected(pos, "statement") + p.advance(stmtStart) + s = new ast.BadStmt({From: pos, To: p.pos}) + break + } + return s + } + + public parseImportSpec(doc: ast.CommentGroup | null, _: token.Token, _: number): ast.Spec { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "ImportSpec")) + }); + } + let ident: ast.Ident | null = null + switch (p.tok) { + case token.IDENT: + ident = p.parseIdent() + break + case token.PERIOD: + ident = new ast.Ident({Name: ".", NamePos: p.pos}) + p.next() + break + } + let pos = p.pos + let path: string = "" + if (p.tok == token.STRING) { + path = p.lit + p.next() + } + else if (token.Token_IsLiteral(p.tok)) { + p.error(pos, "import path must be a string") + p.next() + } + else { + p.error(pos, "missing import path") + p.advance(exprEnd) + } + let comment = p.expectSemi() + let spec = new ast.ImportSpec({Comment: comment, Doc: doc, Name: ident, Path: new ast.BasicLit({Kind: token.STRING, Value: path, ValuePos: pos})}) + p.imports = $.append(p.imports, spec) + return spec + } + + public parseValueSpec(doc: ast.CommentGroup | null, keyword: token.Token, iota: number): ast.Spec { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, token.Token_String(keyword) + "Spec")) + }); + } + let idents = p.parseIdentList() + let typ: ast.Expr = null + let values: $.Slice = null + switch (keyword) { + case token.CONST: + if (p.tok != token.EOF && p.tok != token.SEMICOLON && p.tok != token.RPAREN) { + typ = p.tryIdentOrType() + if (p.tok == token.ASSIGN) { + p.next() + values = p.parseList(true) + } + } + break + case token.VAR: + if (p.tok != token.ASSIGN) { + typ = p.parseType() + } + if (p.tok == token.ASSIGN) { + p.next() + values = p.parseList(true) + } + break + default: + $.panic("unreachable") + break + } + let comment = p.expectSemi() + let spec = new ast.ValueSpec({Comment: comment, Doc: doc, Names: idents, Type: typ, Values: values}) + return spec + } + + public parseGenericType(spec: ast.TypeSpec | null, openPos: token.Pos, name0: ast.Ident | null, typ0: ast.Expr): void { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "parseGenericType")) + }); + } + let list = p.parseParameterList(name0, typ0, token.RBRACK) + let closePos = p.expect(token.RBRACK) + spec!.TypeParams = new ast.FieldList({Closing: closePos, List: list, Opening: openPos}) + if (p.tok == token.ASSIGN) { + // type alias + spec!.Assign = p.pos + p.next() + } + spec!.Type = p.parseType() + } + + public parseTypeSpec(doc: ast.CommentGroup | null, _: token.Token, _: number): ast.Spec { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "TypeSpec")) + }); + } + let name = p.parseIdent() + let spec = new ast.TypeSpec({Doc: doc, Name: name}) + if (p.tok == token.LBRACK) { + // spec.Name "[" ... + // array/slice type or type parameter list + let lbrack = p.pos + p.next() + + // We may have an array type or a type parameter list. 
+ // In either case we expect an expression x (which may + // just be a name, or a more complex expression) which + // we can analyze further. + // + // A type parameter list may have a type bound starting + // with a "[" as in: P []E. In that case, simply parsing + // an expression would lead to an error: P[] is invalid. + // But since index or slice expressions are never constant + // and thus invalid array length expressions, if the name + // is followed by "[" it must be the start of an array or + // slice constraint. Only if we don't see a "[" do we + // need to parse a full expression. Notably, name <- x + // is not a concern because name <- x is a statement and + // not an expression. + + // To parse the expression starting with name, expand + // the call sequence we would get by passing in name + // to parser.expr, and pass in name to parsePrimaryExpr. + + // Analyze expression x. If we can split x into a type parameter + // name, possibly followed by a type parameter type, we consider + // this the start of a type parameter list, with some caveats: + // a single name followed by "]" tilts the decision towards an + // array declaration; a type parameter type that could also be + // an ordinary expression but which is followed by a comma tilts + // the decision towards a type parameter list. + + // spec.Name "[" pname ... + // spec.Name "[" pname ptype ... + // spec.Name "[" pname ptype "," ... + // ptype may be nil + + // spec.Name "[" pname "]" ... + // spec.Name "[" x ... + + // array type + if (p.tok == token.IDENT) { + // We may have an array type or a type parameter list. + // In either case we expect an expression x (which may + // just be a name, or a more complex expression) which + // we can analyze further. + // + // A type parameter list may have a type bound starting + // with a "[" as in: P []E. In that case, simply parsing + // an expression would lead to an error: P[] is invalid. + // But since index or slice expressions are never constant + // and thus invalid array length expressions, if the name + // is followed by "[" it must be the start of an array or + // slice constraint. Only if we don't see a "[" do we + // need to parse a full expression. Notably, name <- x + // is not a concern because name <- x is a statement and + // not an expression. + let x: ast.Expr = p.parseIdent() + + // To parse the expression starting with name, expand + // the call sequence we would get by passing in name + // to parser.expr, and pass in name to parsePrimaryExpr. + if (p.tok != token.LBRACK) { + // To parse the expression starting with name, expand + // the call sequence we would get by passing in name + // to parser.expr, and pass in name to parsePrimaryExpr. + p.exprLev++ + let lhs = p.parsePrimaryExpr(x) + x = p.parseBinaryExpr(lhs, token.LowestPrec + 1) + p.exprLev-- + } + // Analyze expression x. If we can split x into a type parameter + // name, possibly followed by a type parameter type, we consider + // this the start of a type parameter list, with some caveats: + // a single name followed by "]" tilts the decision towards an + // array declaration; a type parameter type that could also be + // an ordinary expression but which is followed by a comma tilts + // the decision towards a type parameter list. + + // spec.Name "[" pname ... + // spec.Name "[" pname ptype ... + // spec.Name "[" pname ptype "," ... + // ptype may be nil + + // spec.Name "[" pname "]" ... + // spec.Name "[" x ... 
+ { + let [pname, ptype] = extractName(x, p.tok == token.COMMA) + if (pname != null && (ptype != null || p.tok != token.RBRACK)) { + // spec.Name "[" pname ... + // spec.Name "[" pname ptype ... + // spec.Name "[" pname ptype "," ... + p.parseGenericType(spec, lbrack, pname, ptype) // ptype may be nil + } + else { + // spec.Name "[" pname "]" ... + // spec.Name "[" x ... + spec!.Type = p.parseArrayType(lbrack, x) + } + } + } + else { + // array type + spec!.Type = p.parseArrayType(lbrack, null) + } + } + else { + // no type parameters + + // type alias + if (p.tok == token.ASSIGN) { + // type alias + spec!.Assign = p.pos + p.next() + } + spec!.Type = p.parseType() + } + spec!.Comment = p.expectSemi() + return spec + } + + public parseGenDecl(keyword: token.Token, f: parseSpecFunction | null): ast.GenDecl | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "GenDecl(" + token.Token_String(keyword) + ")")) + }); + } + let doc = p.leadComment + let pos = p.expect(keyword) + let lparen: token.Pos = 0 + let rparen: token.Pos = 0 + let list: $.Slice = null + if (p.tok == token.LPAREN) { + lparen = p.pos + p.next() + for (let iota = 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++) { + list = $.append(list, f!(p.leadComment, keyword, iota)) + } + rparen = p.expect(token.RPAREN) + p.expectSemi() + } + else { + list = $.append(list, f!(null, keyword, 0)) + } + return new ast.GenDecl({Doc: doc, Lparen: lparen, Rparen: rparen, Specs: list, Tok: keyword, TokPos: pos}) + } + + public parseFuncDecl(): ast.FuncDecl | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "FunctionDecl")) + }); + } + let doc = p.leadComment + let pos = p.expect(token.FUNC) + let recv: ast.FieldList | null = null + if (p.tok == token.LPAREN) { + ;[, recv] = p.parseParameters(false) + } + let ident = p.parseIdent() + let [tparams, params] = p.parseParameters(true) + if (recv != null && tparams != null) { + // Method declarations do not have type parameters. We parse them for a + // better error message and improved error recovery. + p.error(tparams!.Opening, "method must have no type parameters") + tparams = null + } + let results = p.parseResult() + let body: ast.BlockStmt | null = null + switch (p.tok) { + case token.LBRACE: + body = p.parseBody() + p.expectSemi() + break + case token.SEMICOLON: + p.next() + if (p.tok == token.LBRACE) { + // opening { of function declaration on next line + p.error(p.pos, "unexpected semicolon or newline before {") + body = p.parseBody() + p.expectSemi() + } + break + default: + p.expectSemi() + break + } + let decl = new ast.FuncDecl({Body: body, Doc: doc, Name: ident, Recv: recv, Type: new ast.FuncType({Func: pos, Params: params, Results: results, TypeParams: tparams})}) + return decl + } + + public parseDecl(sync: Map | null): ast.Decl { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "Declaration")) + }); + } + let f: parseSpecFunction | null = null + switch (p.tok) { + case token.IMPORT: + f = p!.parseImportSpec.bind(p!) + break + case token.CONST: + case token.VAR: + f = p!.parseValueSpec.bind(p!) + break + case token.TYPE: + f = p!.parseTypeSpec.bind(p!) 
+ break + case token.FUNC: + return p.parseFuncDecl() + break + default: + let pos = p.pos + p.errorExpected(pos, "declaration") + p.advance(sync) + return new ast.BadDecl({From: pos, To: p.pos}) + break + } + return p.parseGenDecl(p.tok, f) + } + + public parseFile(): ast.File | null { + const p = this + using __defer = new $.DisposableStack(); + if (p.trace) { + using __defer = new $.DisposableStack(); + __defer.defer(() => { + un(trace(p, "File")) + }); + } + if (scanner.ErrorList_Len(p.errors) != 0) { + return null + } + let doc = p.leadComment + let pos = p.expect(token.PACKAGE) + let ident = p.parseIdent() + if (ident!.Name == "_" && (p.mode & 16) != 0) { + p.error(p.pos, "invalid package name _") + } + p.expectSemi() + if (scanner.ErrorList_Len(p.errors) != 0) { + return null + } + let decls: $.Slice = null + if ((p.mode & 1) == 0) { + // import decls + for (; p.tok == token.IMPORT; ) { + decls = $.append(decls, p.parseGenDecl(token.IMPORT, p!.parseImportSpec.bind(p!))) + } + + // rest of package body + + // Continue to accept import declarations for error tolerance, but complain. + if ((p.mode & 2) == 0) { + // rest of package body + let prev = token.IMPORT + + // Continue to accept import declarations for error tolerance, but complain. + for (; p.tok != token.EOF; ) { + // Continue to accept import declarations for error tolerance, but complain. + if (p.tok == token.IMPORT && prev != token.IMPORT) { + p.error(p.pos, "imports must appear before other declarations") + } + prev = p.tok + + decls = $.append(decls, p.parseDecl(declStart)) + } + } + } + let f = new ast.File({Comments: p.comments, Decls: decls, Doc: doc, GoVersion: p.goVersion, Imports: p.imports, Name: ident, Package: pos}) + let declErr: ((p0: token.Pos, p1: string) => void) | null = null + if ((p.mode & 16) != 0) { + declErr = p!.error.bind(p!) 
+ } + if ((p.mode & 64) == 0) { + resolveFile(f, p.file, declErr) + } + return f + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'parser', + new parser(), + [{ name: "init", args: [{ name: "file", type: { kind: $.TypeKind.Pointer, elemType: "File" } }, { name: "src", type: { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Basic, name: "number" } } }, { name: "mode", type: "Mode" }], returns: [] }, { name: "printTrace", args: [{ name: "a", type: { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Interface, methods: [] } } }], returns: [] }, { name: "next0", args: [], returns: [] }, { name: "consumeComment", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "Comment" } }, { type: { kind: $.TypeKind.Basic, name: "number" } }] }, { name: "consumeCommentGroup", args: [{ name: "n", type: { kind: $.TypeKind.Basic, name: "number" } }], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "CommentGroup" } }, { type: { kind: $.TypeKind.Basic, name: "number" } }] }, { name: "next", args: [], returns: [] }, { name: "error", args: [{ name: "pos", type: "Pos" }, { name: "msg", type: { kind: $.TypeKind.Basic, name: "string" } }], returns: [] }, { name: "errorExpected", args: [{ name: "pos", type: "Pos" }, { name: "msg", type: { kind: $.TypeKind.Basic, name: "string" } }], returns: [] }, { name: "expect", args: [{ name: "tok", type: "Token" }], returns: [{ type: "Pos" }] }, { name: "expect2", args: [{ name: "tok", type: "Token" }], returns: [{ type: "Pos" }] }, { name: "expectClosing", args: [{ name: "tok", type: "Token" }, { name: "context", type: { kind: $.TypeKind.Basic, name: "string" } }], returns: [{ type: "Pos" }] }, { name: "expectSemi", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "CommentGroup" } }] }, { name: "atComma", args: [{ name: "context", type: { kind: $.TypeKind.Basic, name: "string" } }, { name: "follow", type: "Token" }], returns: [{ type: { kind: $.TypeKind.Basic, name: "boolean" } }] }, { name: "advance", args: [{ name: "to", type: { kind: $.TypeKind.Map, keyType: "Token", elemType: { kind: $.TypeKind.Basic, name: "boolean" } } }], returns: [] }, { name: "safePos", args: [{ name: "pos", type: "Pos" }], returns: [{ type: "Pos" }] }, { name: "parseIdent", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "Ident" } }] }, { name: "parseIdentList", args: [], returns: [{ type: { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Pointer, elemType: "Ident" } } }] }, { name: "parseExprList", args: [], returns: [{ type: { kind: $.TypeKind.Slice, elemType: "Expr" } }] }, { name: "parseList", args: [{ name: "inRhs", type: { kind: $.TypeKind.Basic, name: "boolean" } }], returns: [{ type: { kind: $.TypeKind.Slice, elemType: "Expr" } }] }, { name: "parseType", args: [], returns: [{ type: "Expr" }] }, { name: "parseQualifiedIdent", args: [{ name: "ident", type: { kind: $.TypeKind.Pointer, elemType: "Ident" } }], returns: [{ type: "Expr" }] }, { name: "parseTypeName", args: [{ name: "ident", type: { kind: $.TypeKind.Pointer, elemType: "Ident" } }], returns: [{ type: "Expr" }] }, { name: "parseArrayType", args: [{ name: "lbrack", type: "Pos" }, { name: "len", type: "Expr" }], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "ArrayType" } }] }, { name: "parseArrayFieldOrTypeInstance", args: [{ name: "x", type: { kind: $.TypeKind.Pointer, elemType: "Ident" } }], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "Ident" } }, { type: "Expr" }] }, { name: 
"parseFieldDecl", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "Field" } }] }, { name: "parseStructType", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "StructType" } }] }, { name: "parsePointerType", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "StarExpr" } }] }, { name: "parseDotsType", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "Ellipsis" } }] }, { name: "parseParamDecl", args: [{ name: "name", type: { kind: $.TypeKind.Pointer, elemType: "Ident" } }, { name: "typeSetsOK", type: { kind: $.TypeKind.Basic, name: "boolean" } }], returns: [{ type: "field" }] }, { name: "parseParameterList", args: [{ name: "name0", type: { kind: $.TypeKind.Pointer, elemType: "Ident" } }, { name: "typ0", type: "Expr" }, { name: "closing", type: "Token" }], returns: [{ type: { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Pointer, elemType: "Field" } } }] }, { name: "parseParameters", args: [{ name: "acceptTParams", type: { kind: $.TypeKind.Basic, name: "boolean" } }], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "FieldList" } }, { type: { kind: $.TypeKind.Pointer, elemType: "FieldList" } }] }, { name: "parseResult", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "FieldList" } }] }, { name: "parseFuncType", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "FuncType" } }] }, { name: "parseMethodSpec", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "Field" } }] }, { name: "embeddedElem", args: [{ name: "x", type: "Expr" }], returns: [{ type: "Expr" }] }, { name: "embeddedTerm", args: [], returns: [{ type: "Expr" }] }, { name: "parseInterfaceType", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "InterfaceType" } }] }, { name: "parseMapType", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "MapType" } }] }, { name: "parseChanType", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "ChanType" } }] }, { name: "parseTypeInstance", args: [{ name: "typ", type: "Expr" }], returns: [{ type: "Expr" }] }, { name: "tryIdentOrType", args: [], returns: [{ type: "Expr" }] }, { name: "parseStmtList", args: [], returns: [{ type: { kind: $.TypeKind.Slice, elemType: "Stmt" } }] }, { name: "parseBody", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "BlockStmt" } }] }, { name: "parseBlockStmt", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "BlockStmt" } }] }, { name: "parseFuncTypeOrLit", args: [], returns: [{ type: "Expr" }] }, { name: "parseOperand", args: [], returns: [{ type: "Expr" }] }, { name: "parseSelector", args: [{ name: "x", type: "Expr" }], returns: [{ type: "Expr" }] }, { name: "parseTypeAssertion", args: [{ name: "x", type: "Expr" }], returns: [{ type: "Expr" }] }, { name: "parseIndexOrSliceOrInstance", args: [{ name: "x", type: "Expr" }], returns: [{ type: "Expr" }] }, { name: "parseCallOrConversion", args: [{ name: "fun", type: "Expr" }], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "CallExpr" } }] }, { name: "parseValue", args: [], returns: [{ type: "Expr" }] }, { name: "parseElement", args: [], returns: [{ type: "Expr" }] }, { name: "parseElementList", args: [], returns: [{ type: { kind: $.TypeKind.Slice, elemType: "Expr" } }] }, { name: "parseLiteralValue", args: [{ name: "typ", type: "Expr" }], returns: [{ type: "Expr" }] }, { name: "parsePrimaryExpr", args: [{ name: "x", type: "Expr" }], returns: [{ type: "Expr" }] }, { name: "parseUnaryExpr", args: [], 
returns: [{ type: "Expr" }] }, { name: "tokPrec", args: [], returns: [{ type: "Token" }, { type: { kind: $.TypeKind.Basic, name: "number" } }] }, { name: "parseBinaryExpr", args: [{ name: "x", type: "Expr" }, { name: "prec1", type: { kind: $.TypeKind.Basic, name: "number" } }], returns: [{ type: "Expr" }] }, { name: "parseExpr", args: [], returns: [{ type: "Expr" }] }, { name: "parseRhs", args: [], returns: [{ type: "Expr" }] }, { name: "parseSimpleStmt", args: [{ name: "mode", type: { kind: $.TypeKind.Basic, name: "number" } }], returns: [{ type: "Stmt" }, { type: { kind: $.TypeKind.Basic, name: "boolean" } }] }, { name: "parseCallExpr", args: [{ name: "callType", type: { kind: $.TypeKind.Basic, name: "string" } }], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "CallExpr" } }] }, { name: "parseGoStmt", args: [], returns: [{ type: "Stmt" }] }, { name: "parseDeferStmt", args: [], returns: [{ type: "Stmt" }] }, { name: "parseReturnStmt", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "ReturnStmt" } }] }, { name: "parseBranchStmt", args: [{ name: "tok", type: "Token" }], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "BranchStmt" } }] }, { name: "makeExpr", args: [{ name: "s", type: "Stmt" }, { name: "want", type: { kind: $.TypeKind.Basic, name: "string" } }], returns: [{ type: "Expr" }] }, { name: "parseIfHeader", args: [], returns: [{ type: "Stmt" }, { type: "Expr" }] }, { name: "parseIfStmt", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "IfStmt" } }] }, { name: "parseCaseClause", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "CaseClause" } }] }, { name: "isTypeSwitchGuard", args: [{ name: "s", type: "Stmt" }], returns: [{ type: { kind: $.TypeKind.Basic, name: "boolean" } }] }, { name: "parseSwitchStmt", args: [], returns: [{ type: "Stmt" }] }, { name: "parseCommClause", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "CommClause" } }] }, { name: "parseSelectStmt", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "SelectStmt" } }] }, { name: "parseForStmt", args: [], returns: [{ type: "Stmt" }] }, { name: "parseStmt", args: [], returns: [{ type: "Stmt" }] }, { name: "parseImportSpec", args: [{ name: "doc", type: { kind: $.TypeKind.Pointer, elemType: "CommentGroup" } }, { name: "_", type: "Token" }, { name: "_", type: { kind: $.TypeKind.Basic, name: "number" } }], returns: [{ type: "Spec" }] }, { name: "parseValueSpec", args: [{ name: "doc", type: { kind: $.TypeKind.Pointer, elemType: "CommentGroup" } }, { name: "keyword", type: "Token" }, { name: "iota", type: { kind: $.TypeKind.Basic, name: "number" } }], returns: [{ type: "Spec" }] }, { name: "parseGenericType", args: [{ name: "spec", type: { kind: $.TypeKind.Pointer, elemType: "TypeSpec" } }, { name: "openPos", type: "Pos" }, { name: "name0", type: { kind: $.TypeKind.Pointer, elemType: "Ident" } }, { name: "typ0", type: "Expr" }], returns: [] }, { name: "parseTypeSpec", args: [{ name: "doc", type: { kind: $.TypeKind.Pointer, elemType: "CommentGroup" } }, { name: "_", type: "Token" }, { name: "_", type: { kind: $.TypeKind.Basic, name: "number" } }], returns: [{ type: "Spec" }] }, { name: "parseGenDecl", args: [{ name: "keyword", type: "Token" }, { name: "f", type: "parseSpecFunction" }], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "GenDecl" } }] }, { name: "parseFuncDecl", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "FuncDecl" } }] }, { name: "parseDecl", args: [{ name: "sync", type: { kind: 
$.TypeKind.Map, keyType: "Token", elemType: { kind: $.TypeKind.Basic, name: "boolean" } } }], returns: [{ type: "Decl" }] }, { name: "parseFile", args: [], returns: [{ type: { kind: $.TypeKind.Pointer, elemType: "File" } }] }], + parser, + {"file": { kind: $.TypeKind.Pointer, elemType: "File" }, "errors": "ErrorList", "scanner": "Scanner", "mode": "Mode", "trace": { kind: $.TypeKind.Basic, name: "boolean" }, "indent": { kind: $.TypeKind.Basic, name: "number" }, "comments": { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Pointer, elemType: "CommentGroup" } }, "leadComment": { kind: $.TypeKind.Pointer, elemType: "CommentGroup" }, "lineComment": { kind: $.TypeKind.Pointer, elemType: "CommentGroup" }, "top": { kind: $.TypeKind.Basic, name: "boolean" }, "goVersion": { kind: $.TypeKind.Basic, name: "string" }, "pos": "Pos", "tok": "Token", "lit": { kind: $.TypeKind.Basic, name: "string" }, "syncPos": "Pos", "syncCnt": { kind: $.TypeKind.Basic, name: "number" }, "exprLev": { kind: $.TypeKind.Basic, name: "number" }, "inRhs": { kind: $.TypeKind.Basic, name: "boolean" }, "imports": { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Pointer, elemType: "ImportSpec" } }, "nestLev": { kind: $.TypeKind.Basic, name: "number" }} + ); +} + +let declStart: Map | null = new Map([[token.IMPORT, true], [token.CONST, true], [token.TYPE, true], [token.VAR, true]]) + +let exprEnd: Map | null = new Map([[token.COMMA, true], [token.COLON, true], [token.SEMICOLON, true], [token.RPAREN, true], [token.RBRACK, true], [token.RBRACE, true]]) + +let stmtStart: Map | null = new Map([[token.BREAK, true], [token.CONST, true], [token.CONTINUE, true], [token.DEFER, true], [token.FALLTHROUGH, true], [token.FOR, true], [token.GO, true], [token.GOTO, true], [token.IF, true], [token.RETURN, true], [token.SELECT, true], [token.SWITCH, true], [token.TYPE, true], [token.VAR, true]]) + +export function trace(p: parser | null, msg: string): parser | null { + p!.printTrace(msg, "(") + p!.indent++ + return p +} + +// Usage pattern: defer un(trace(p, "...")) +export function un(p: parser | null): void { + p!.indent-- + p!.printTrace(")") +} + +export function incNestLev(p: parser | null): parser | null { + p!.nestLev++ + if (p!.nestLev > 100000) { + p!.error(p!.pos, "exceeded max nesting depth") + $.panic($.markAsStructValue(new bailout({}))) + } + return p +} + +// decNestLev is used to track nesting depth during parsing to prevent stack exhaustion. +// It is used along with incNestLev in a similar fashion to how un and trace are used. +export function decNestLev(p: parser | null): void { + p!.nestLev-- +} + +export function assert(cond: boolean, msg: string): void { + if (!cond) { + $.panic("go/parser internal error: " + msg) + } +} + +export function isTypeSwitchAssert(x: ast.Expr): boolean { + let { value: a, ok: ok } = $.typeAssert(x, {kind: $.TypeKind.Pointer, elemType: 'ast.TypeAssertExpr'}) + return ok && a!.Type == null +} + +// extractName splits the expression x into (name, expr) if syntactically +// x can be written as name expr. The split only happens if expr is a type +// element (per the isTypeElem predicate) or if force is set. +// If x is just a name, the result is (name, nil). If the split succeeds, +// the result is (name, expr). Otherwise the result is (nil, x). 
+// Examples: +// +// x force name expr +// ------------------------------------ +// P*[]int T/F P *[]int +// P*E T P *E +// P*E F nil P*E +// P([]int) T/F P ([]int) +// P(E) T P (E) +// P(E) F nil P(E) +// P*E|F|~G T/F P *E|F|~G +// P*E|F|G T P *E|F|G +// P*E|F|G F nil P*E|F|G +export function extractName(x: ast.Expr, force: boolean): [ast.Ident | null, ast.Expr] { + + // x = name *x.Y + + // x = name lhs|x.Y + + // x = name (x.Args[0]) + // (Note that the cmd/compile/internal/syntax parser does not care + // about syntax tree fidelity and does not preserve parentheses here.) + $.typeSwitch(x, [{ types: [{kind: $.TypeKind.Pointer, elemType: 'ast.Ident'}], body: (x) => { + return [x, null] + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.BinaryExpr'}], body: (x) => { + switch (x!.Op) { + case token.MUL: + { + let { value: name } = $.typeAssert(x!.X, {kind: $.TypeKind.Pointer, elemType: 'ast.Ident'}) + if (name != null && (force || isTypeElem(x!.Y))) { + // x = name *x.Y + return [name, new ast.StarExpr({Star: x!.OpPos, X: x!.Y})] + } + } + break + case token.OR: + { + let [name, lhs] = extractName(x!.X, force || isTypeElem(x!.Y)) + if (name != null && lhs != null) { + // x = name lhs|x.Y + let op = $.markAsStructValue(x!.clone()) + op.X = lhs + return [name, op] + } + } + break + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.CallExpr'}], body: (x) => { + { + let { value: name } = $.typeAssert(x!.Fun, {kind: $.TypeKind.Pointer, elemType: 'ast.Ident'}) + if (name != null) { + + // x = name (x.Args[0]) + // (Note that the cmd/compile/internal/syntax parser does not care + // about syntax tree fidelity and does not preserve parentheses here.) + if ($.len(x!.Args) == 1 && x!.Ellipsis == token.NoPos && (force || isTypeElem(x!.Args![0]))) { + // x = name (x.Args[0]) + // (Note that the cmd/compile/internal/syntax parser does not care + // about syntax tree fidelity and does not preserve parentheses here.) + return [name, new ast.ParenExpr({Lparen: x!.Lparen, Rparen: x!.Rparen, X: x!.Args![0]})] + } + } + } + }}]) + return [null, x] +} + +// isTypeElem reports whether x is a (possibly parenthesized) type element expression. +// The result is false if x could be a type element OR an ordinary (value) expression. +export function isTypeElem(x: ast.Expr): boolean { + $.typeSwitch(x, [{ types: [{kind: $.TypeKind.Pointer, elemType: 'ast.ArrayType'}, {kind: $.TypeKind.Pointer, elemType: 'ast.StructType'}, {kind: $.TypeKind.Pointer, elemType: 'ast.FuncType'}, {kind: $.TypeKind.Pointer, elemType: 'ast.InterfaceType'}, {kind: $.TypeKind.Pointer, elemType: 'ast.MapType'}, {kind: $.TypeKind.Pointer, elemType: 'ast.ChanType'}], body: (x) => { + return true + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.BinaryExpr'}], body: (x) => { + return isTypeElem(x!.X) || isTypeElem(x!.Y) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.UnaryExpr'}], body: (x) => { + return x!.Op == token.TILDE + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.ParenExpr'}], body: (x) => { + return isTypeElem(x!.X) + }}]) + return false +} + +// packIndexExpr returns an IndexExpr x[expr0] or IndexListExpr x[expr0, ...]. 
+export function packIndexExpr(x: ast.Expr, lbrack: token.Pos, exprs: $.Slice, rbrack: token.Pos): ast.Expr { + switch ($.len(exprs)) { + case 0: + $.panic("internal error: packIndexExpr with empty expr slice") + break + case 1: + return new ast.IndexExpr({Index: exprs![0], Lbrack: lbrack, Rbrack: rbrack, X: x}) + break + default: + return new ast.IndexListExpr({Indices: exprs, Lbrack: lbrack, Rbrack: rbrack, X: x}) + break + } +} + diff --git a/compliance/deps/go/parser/resolver.gs.ts b/compliance/deps/go/parser/resolver.gs.ts new file mode 100644 index 00000000..d66a1608 --- /dev/null +++ b/compliance/deps/go/parser/resolver.gs.ts @@ -0,0 +1,856 @@ +import * as $ from "@goscript/builtin/index.js" +import { assert } from "./parser.gs.js"; +import { bailout } from "./parser.gs.js"; + +import * as fmt from "@goscript/fmt/index.js" + +import * as ast from "@goscript/go/ast/index.js" + +import * as token from "@goscript/go/token/index.js" + +import * as strings from "@goscript/strings/index.js" + +let debugResolve: boolean = false + +let maxScopeDepth: number = 1e3 + +export class resolver { + public get handle(): token.File | null { + return this._fields.handle.value + } + public set handle(value: token.File | null) { + this._fields.handle.value = value + } + + public get declErr(): ((p0: token.Pos, p1: string) => void) | null { + return this._fields.declErr.value + } + public set declErr(value: ((p0: token.Pos, p1: string) => void) | null) { + this._fields.declErr.value = value + } + + // Ordinary identifier scopes + // pkgScope.Outer == nil + public get pkgScope(): ast.Scope | null { + return this._fields.pkgScope.value + } + public set pkgScope(value: ast.Scope | null) { + this._fields.pkgScope.value = value + } + + // top-most scope; may be pkgScope + public get topScope(): ast.Scope | null { + return this._fields.topScope.value + } + public set topScope(value: ast.Scope | null) { + this._fields.topScope.value = value + } + + // unresolved identifiers + public get unresolved(): $.Slice { + return this._fields.unresolved.value + } + public set unresolved(value: $.Slice) { + this._fields.unresolved.value = value + } + + // scope depth + public get depth(): number { + return this._fields.depth.value + } + public set depth(value: number) { + this._fields.depth.value = value + } + + // Label scopes + // (maintained by open/close LabelScope) + // label scope for current function + public get labelScope(): ast.Scope | null { + return this._fields.labelScope.value + } + public set labelScope(value: ast.Scope | null) { + this._fields.labelScope.value = value + } + + // stack of unresolved labels + public get targetStack(): $.Slice<$.Slice> { + return this._fields.targetStack.value + } + public set targetStack(value: $.Slice<$.Slice>) { + this._fields.targetStack.value = value + } + + public _fields: { + handle: $.VarRef; + declErr: $.VarRef<((p0: token.Pos, p1: string) => void) | null>; + pkgScope: $.VarRef; + topScope: $.VarRef; + unresolved: $.VarRef<$.Slice>; + depth: $.VarRef; + labelScope: $.VarRef; + targetStack: $.VarRef<$.Slice<$.Slice>>; + } + + constructor(init?: Partial<{declErr?: ((p0: token.Pos, p1: string) => void) | null, depth?: number, handle?: token.File | null, labelScope?: ast.Scope | null, pkgScope?: ast.Scope | null, targetStack?: $.Slice<$.Slice>, topScope?: ast.Scope | null, unresolved?: $.Slice}>) { + this._fields = { + handle: $.varRef(init?.handle ?? null), + declErr: $.varRef(init?.declErr ?? null), + pkgScope: $.varRef(init?.pkgScope ?? 
null), + topScope: $.varRef(init?.topScope ?? null), + unresolved: $.varRef(init?.unresolved ?? null), + depth: $.varRef(init?.depth ?? 0), + labelScope: $.varRef(init?.labelScope ?? null), + targetStack: $.varRef(init?.targetStack ?? null) + } + } + + public clone(): resolver { + const cloned = new resolver() + cloned._fields = { + handle: $.varRef(this._fields.handle.value ? $.markAsStructValue(this._fields.handle.value.clone()) : null), + declErr: $.varRef(this._fields.declErr.value), + pkgScope: $.varRef(this._fields.pkgScope.value ? $.markAsStructValue(this._fields.pkgScope.value.clone()) : null), + topScope: $.varRef(this._fields.topScope.value ? $.markAsStructValue(this._fields.topScope.value.clone()) : null), + unresolved: $.varRef(this._fields.unresolved.value), + depth: $.varRef(this._fields.depth.value), + labelScope: $.varRef(this._fields.labelScope.value ? $.markAsStructValue(this._fields.labelScope.value.clone()) : null), + targetStack: $.varRef(this._fields.targetStack.value) + } + return cloned + } + + public trace(format: string, ...args: any[]): void { + const r = this + fmt.Println(strings.Repeat(". ", r.depth) + r.sprintf(format, ...(args ?? []))) + } + + public sprintf(format: string, ...args: any[]): string { + const r = this + for (let i = 0; i < $.len(args); i++) { + const arg = args![i] + { + $.typeSwitch(arg, [{ types: ['token.Pos'], body: (arg) => { + args![i] = $.markAsStructValue(await r.handle!.Position(arg).clone()) + }}]) + } + } + return fmt.Sprintf(format, ...(args ?? [])) + } + + public openScope(pos: token.Pos): void { + const r = this + r.depth++ + if (r.depth > 1000) { + $.panic($.markAsStructValue(new bailout({msg: "exceeded max scope depth during object resolution", pos: pos}))) + } + if (false) { + r.trace("opening scope @%v", pos) + } + r.topScope = ast.NewScope(r.topScope) + } + + public closeScope(): void { + const r = this + r.depth-- + if (false) { + r.trace("closing scope") + } + r.topScope = r.topScope!.Outer + } + + public openLabelScope(): void { + const r = this + r.labelScope = ast.NewScope(r.labelScope) + r.targetStack = $.append(r.targetStack, null) + } + + public closeLabelScope(): void { + const r = this + let n = $.len(r.targetStack) - 1 + let scope = r.labelScope + for (let _i = 0; _i < $.len(r.targetStack![n]); _i++) { + const ident = r.targetStack![n]![_i] + { + ident!.Obj = scope!.Lookup(ident!.Name) + if (ident!.Obj == null && r.declErr != null) { + r.declErr(ident!.Pos(), fmt.Sprintf("label %s undefined", ident!.Name)) + } + } + } + r.targetStack = $.goSlice(r.targetStack, 0, n) + r.labelScope = r.labelScope!.Outer + } + + public _declare(decl: null | any, data: null | any, scope: ast.Scope | null, kind: ast.ObjKind, ...idents: ast.Ident | null[]): void { + const r = this + for (let _i = 0; _i < $.len(idents); _i++) { + const ident = idents![_i] + { + if (ident!.Obj != null) { + $.panic(fmt.Sprintf("%v: identifier %s already declared or resolved", ident!.Pos(), ident!.Name)) + } + let obj = ast.NewObj(kind, ident!.Name) + // remember the corresponding declaration for redeclaration + // errors and global variable resolution/typechecking phase + obj!.Decl = decl + obj!.Data = data + // Identifiers (for receiver type parameters) are written to the scope, but + // never set as the resolved object. See go.dev/issue/50956. 
+ { + let { ok: ok } = $.typeAssert(decl, {kind: $.TypeKind.Pointer, elemType: 'ast.Ident'}) + if (!ok) { + ident!.Obj = obj + } + } + if (ident!.Name != "_") { + if (false) { + r.trace("declaring %s@%v", ident!.Name, ident!.Pos()) + } + { + let alt = scope!.Insert(obj) + if (alt != null && r.declErr != null) { + let prevDecl = "" + { + let pos = alt!.Pos() + if (token.Pos_IsValid(pos)) { + prevDecl = r.sprintf("\n\tprevious declaration at %v", pos) + } + } + r.declErr(ident!.Pos(), fmt.Sprintf("%s redeclared in this block%s", ident!.Name, prevDecl)) + } + } + } + } + } + } + + public shortVarDecl(decl: ast.AssignStmt | null): void { + const r = this + let n = 0 // number of new variables + for (let _i = 0; _i < $.len(decl!.Lhs); _i++) { + const x = decl!.Lhs![_i] + { + + // remember corresponding assignment for other tools + + // redeclaration + + // new declaration + { + let { value: ident, ok: isIdent } = $.typeAssert(x, {kind: $.TypeKind.Pointer, elemType: 'ast.Ident'}) + if (isIdent) { + assert(ident!.Obj == null, "identifier already declared or resolved") + let obj = ast.NewObj(ast.Var, ident!.Name) + // remember corresponding assignment for other tools + obj!.Decl = decl + ident!.Obj = obj + + // redeclaration + + // new declaration + if (ident!.Name != "_") { + if (false) { + r.trace("declaring %s@%v", ident!.Name, ident!.Pos()) + } + + // redeclaration + + // new declaration + { + let alt = r.topScope!.Insert(obj) + if (alt != null) { + ident!.Obj = alt // redeclaration + } + else { + n++ + } + } + } + } + } + } + } + if (n == 0 && r.declErr != null) { + r.declErr(decl!.Lhs![0]!.Pos(), "no new variables on left side of :=") + } + } + + // If x is an identifier, resolve attempts to resolve x by looking up + // the object it denotes. If no object is found and collectUnresolved is + // set, x is marked as unresolved and collected in the list of unresolved + // identifiers. + public resolve(ident: ast.Ident | null, collectUnresolved: boolean): void { + const r = this + if (ident!.Obj != null) { + $.panic(r.sprintf("%v: identifier %s already declared or resolved", ident!.Pos(), ident!.Name)) + } + if (ident!.Name == "_") { + return + } + for (let s = r.topScope; s != null; s = s!.Outer) { + + // Identifiers (for receiver type parameters) are written to the scope, + // but never set as the resolved object. See go.dev/issue/50956. + { + let obj = s!.Lookup(ident!.Name) + if (obj != null) { + if (false) { + r.trace("resolved %v:%s to %v", ident!.Pos(), ident!.Name, obj) + } + assert(obj!.Name != "", "obj with no name") + // Identifiers (for receiver type parameters) are written to the scope, + // but never set as the resolved object. See go.dev/issue/50956. 
+ { + let { ok: ok } = $.typeAssert(obj!.Decl, {kind: $.TypeKind.Pointer, elemType: 'ast.Ident'}) + if (!ok) { + ident!.Obj = obj + } + } + return + } + } + } + if (collectUnresolved) { + ident!.Obj = unresolved + r.unresolved = $.append(r.unresolved, ident) + } + } + + public walkExprs(list: $.Slice): void { + const r = this + for (let _i = 0; _i < $.len(list); _i++) { + const node = list![_i] + { + ast.Walk(r, node) + } + } + } + + public walkLHS(list: $.Slice): void { + const r = this + for (let _i = 0; _i < $.len(list); _i++) { + const expr = list![_i] + { + let expr = ast.Unparen(expr) + { + let { ok: ok } = $.typeAssert(expr, {kind: $.TypeKind.Pointer, elemType: 'ast.Ident'}) + if (!ok && expr != null) { + ast.Walk(r, expr) + } + } + } + } + } + + public walkStmts(list: $.Slice): void { + const r = this + for (let _i = 0; _i < $.len(list); _i++) { + const stmt = list![_i] + { + ast.Walk(r, stmt) + } + } + } + + public Visit(node: ast.Node): ast.Visitor { + const r = this + using __defer = new $.DisposableStack(); + if (false && node != null) { + r.trace("node %T@%v", node, node!.Pos()) + } + $.typeSwitch(node, [{ types: [{kind: $.TypeKind.Pointer, elemType: 'ast.Ident'}], body: (n) => { + r.resolve(n, true) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.FuncLit'}], body: (n) => { + r.openScope(n!.Pos()) + __defer.defer(() => { + r.closeScope() + }); + r.walkFuncType(n!.Type) + r.walkBody(n!.Body) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.SelectorExpr'}], body: (n) => { + ast.Walk(r, n!.X) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.StructType'}], body: (n) => { + r.openScope(n!.Pos()) + __defer.defer(() => { + r.closeScope() + }); + r.walkFieldList(n!.Fields, ast.Var) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.FuncType'}], body: (n) => { + r.openScope(n!.Pos()) + __defer.defer(() => { + r.closeScope() + }); + r.walkFuncType(n) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.CompositeLit'}], body: (n) => { + if (n!.Type != null) { + ast.Walk(r, n!.Type) + } + for (let _i = 0; _i < $.len(n!.Elts); _i++) { + const e = n!.Elts![_i] + { + + // See go.dev/issue/45160: try to resolve composite lit keys, but don't + // collect them as unresolved if resolution failed. This replicates + // existing behavior when resolving during parsing. + { + let { value: kv } = $.typeAssert(e, {kind: $.TypeKind.Pointer, elemType: 'ast.KeyValueExpr'}) + if (kv != null) { + // See go.dev/issue/45160: try to resolve composite lit keys, but don't + // collect them as unresolved if resolution failed. This replicates + // existing behavior when resolving during parsing. 
+ { + let { value: ident } = $.typeAssert(kv!.Key, {kind: $.TypeKind.Pointer, elemType: 'ast.Ident'}) + if (ident != null) { + r.resolve(ident, false) + } + else { + ast.Walk(r, kv!.Key) + } + } + ast.Walk(r, kv!.Value) + } + else { + ast.Walk(r, e) + } + } + } + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.InterfaceType'}], body: (n) => { + r.openScope(n!.Pos()) + __defer.defer(() => { + r.closeScope() + }); + r.walkFieldList(n!.Methods, ast.Fun) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.LabeledStmt'}], body: (n) => { + r._declare(n, null, r.labelScope, ast.Lbl, n!.Label) + ast.Walk(r, n!.Stmt) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.AssignStmt'}], body: (n) => { + r.walkExprs(n!.Rhs) + if (n!.Tok == token.DEFINE) { + r.shortVarDecl(n) + } + else { + r.walkExprs(n!.Lhs) + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.BranchStmt'}], body: (n) => { + if (n!.Tok != token.FALLTHROUGH && n!.Label != null) { + let depth = $.len(r.targetStack) - 1 + r.targetStack![depth] = $.append(r.targetStack![depth], n!.Label) + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.BlockStmt'}], body: (n) => { + r.openScope(n!.Pos()) + __defer.defer(() => { + r.closeScope() + }); + r.walkStmts(n!.List) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.IfStmt'}], body: (n) => { + r.openScope(n!.Pos()) + __defer.defer(() => { + r.closeScope() + }); + if (n!.Init != null) { + ast.Walk(r, n!.Init) + } + ast.Walk(r, n!.Cond) + ast.Walk(r, n!.Body) + if (n!.Else != null) { + ast.Walk(r, n!.Else) + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.CaseClause'}], body: (n) => { + r.walkExprs(n!.List) + r.openScope(n!.Pos()) + __defer.defer(() => { + r.closeScope() + }); + r.walkStmts(n!.Body) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.SwitchStmt'}], body: (n) => { + r.openScope(n!.Pos()) + __defer.defer(() => { + r.closeScope() + }); + if (n!.Init != null) { + ast.Walk(r, n!.Init) + } + if (n!.Tag != null) { + using __defer = new $.DisposableStack(); + // The scope below reproduces some unnecessary behavior of the parser, + // opening an extra scope in case this is a type switch. It's not needed + // for expression switches. + // TODO: remove this once we've matched the parser resolution exactly. 
+ if (n!.Init != null) { + using __defer = new $.DisposableStack(); + r.openScope(n!.Tag!.Pos()) + __defer.defer(() => { + r.closeScope() + }); + } + ast.Walk(r, n!.Tag) + } + if (n!.Body != null) { + r.walkStmts(n!.Body!.List) + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.TypeSwitchStmt'}], body: (n) => { + if (n!.Init != null) { + using __defer = new $.DisposableStack(); + r.openScope(n!.Pos()) + __defer.defer(() => { + r.closeScope() + }); + ast.Walk(r, n!.Init) + } + r.openScope(n!.Assign!.Pos()) + __defer.defer(() => { + r.closeScope() + }); + ast.Walk(r, n!.Assign) + if (n!.Body != null) { + r.walkStmts(n!.Body!.List) + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.CommClause'}], body: (n) => { + r.openScope(n!.Pos()) + __defer.defer(() => { + r.closeScope() + }); + if (n!.Comm != null) { + ast.Walk(r, n!.Comm) + } + r.walkStmts(n!.Body) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.SelectStmt'}], body: (n) => { + if (n!.Body != null) { + r.walkStmts(n!.Body!.List) + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.ForStmt'}], body: (n) => { + r.openScope(n!.Pos()) + __defer.defer(() => { + r.closeScope() + }); + if (n!.Init != null) { + ast.Walk(r, n!.Init) + } + if (n!.Cond != null) { + ast.Walk(r, n!.Cond) + } + if (n!.Post != null) { + ast.Walk(r, n!.Post) + } + ast.Walk(r, n!.Body) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.RangeStmt'}], body: (n) => { + r.openScope(n!.Pos()) + __defer.defer(() => { + r.closeScope() + }); + ast.Walk(r, n!.X) + let lhs: $.Slice = null + if (n!.Key != null) { + lhs = $.append(lhs, n!.Key) + } + if (n!.Value != null) { + lhs = $.append(lhs, n!.Value) + } + if ($.len(lhs) > 0) { + + // Note: we can't exactly match the behavior of object resolution + // during the parsing pass here, as it uses the position of the RANGE + // token for the RHS OpPos. That information is not contained within + // the AST. + + // TODO(rFindley): this walkLHS reproduced the parser resolution, but + // is it necessary? By comparison, for a normal AssignStmt we don't + // walk the LHS in case there is an invalid identifier list. + if (n!.Tok == token.DEFINE) { + // Note: we can't exactly match the behavior of object resolution + // during the parsing pass here, as it uses the position of the RANGE + // token for the RHS OpPos. That information is not contained within + // the AST. + let _as = new ast.AssignStmt({Lhs: lhs, Rhs: $.arrayToSlice([new ast.UnaryExpr({Op: token.RANGE, X: n!.X})]), Tok: token.DEFINE, TokPos: n!.TokPos}) + // TODO(rFindley): this walkLHS reproduced the parser resolution, but + // is it necessary? By comparison, for a normal AssignStmt we don't + // walk the LHS in case there is an invalid identifier list. + r.walkLHS(lhs) + r.shortVarDecl(_as) + } + else { + r.walkExprs(lhs) + } + } + ast.Walk(r, n!.Body) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.GenDecl'}], body: (n) => { + switch (n!.Tok) { + case token.CONST: + case token.VAR: + for (let i = 0; i < $.len(n!.Specs); i++) { + const spec = n!.Specs![i] + { + let spec = $.mustTypeAssert(spec, {kind: $.TypeKind.Pointer, elemType: 'ast.ValueSpec'}) + let kind = ast.Con + if (n!.Tok == token.VAR) { + kind = ast.Var + } + r.walkExprs(spec!.Values) + if (spec!.Type != null) { + ast.Walk(r, spec!.Type) + } + r._declare(spec, i, r.topScope, kind, ...(spec!.Names ?? 
[])) + } + } + break + case token.TYPE: + for (let _i = 0; _i < $.len(n!.Specs); _i++) { + const spec = n!.Specs![_i] + { + using __defer = new $.DisposableStack(); + let spec = $.mustTypeAssert(spec, {kind: $.TypeKind.Pointer, elemType: 'ast.TypeSpec'}) + // Go spec: The scope of a type identifier declared inside a function begins + // at the identifier in the TypeSpec and ends at the end of the innermost + // containing block. + r._declare(spec, null, r.topScope, ast.Typ, spec!.Name) + if (spec!.TypeParams != null) { + using __defer = new $.DisposableStack(); + r.openScope(spec!.Pos()) + __defer.defer(() => { + r.closeScope() + }); + r.walkTParams(spec!.TypeParams) + } + ast.Walk(r, spec!.Type) + } + } + break + } + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.FuncDecl'}], body: (n) => { + r.openScope(n!.Pos()) + __defer.defer(() => { + r.closeScope() + }); + r.walkRecv(n!.Recv) + if (n!.Type!.TypeParams != null) { + + // TODO(rFindley): need to address receiver type parameters. + r.walkTParams(n!.Type!.TypeParams) + // TODO(rFindley): need to address receiver type parameters. + } + r.resolveList(n!.Type!.Params) + r.resolveList(n!.Type!.Results) + r.declareList(n!.Recv, ast.Var) + r.declareList(n!.Type!.Params, ast.Var) + r.declareList(n!.Type!.Results, ast.Var) + r.walkBody(n!.Body) + if (n!.Recv == null && n!.Name!.Name != "init") { + r._declare(n, null, r.pkgScope, ast.Fun, n!.Name) + } + }}], () => { + return r + }) + return null + } + + public walkFuncType(typ: ast.FuncType | null): void { + const r = this + r.resolveList(typ!.Params) + r.resolveList(typ!.Results) + r.declareList(typ!.Params, ast.Var) + r.declareList(typ!.Results, ast.Var) + } + + public resolveList(list: ast.FieldList | null): void { + const r = this + if (list == null) { + return + } + for (let _i = 0; _i < $.len(list!.List); _i++) { + const f = list!.List![_i] + { + if (f!.Type != null) { + ast.Walk(r, f!.Type) + } + } + } + } + + public declareList(list: ast.FieldList | null, kind: ast.ObjKind): void { + const r = this + if (list == null) { + return + } + for (let _i = 0; _i < $.len(list!.List); _i++) { + const f = list!.List![_i] + { + r._declare(f, null, r.topScope, kind, ...(f!.Names ?? [])) + } + } + } + + public walkRecv(recv: ast.FieldList | null): void { + const r = this + if (recv == null || $.len(recv!.List) == 0) { + return + } + let typ = recv!.List![0]!.Type + { + let { value: ptr, ok: ok } = $.typeAssert(typ, {kind: $.TypeKind.Pointer, elemType: 'ast.StarExpr'}) + if (ok) { + typ = ptr!.X + } + } + // exprs to declare + let declareExprs: $.Slice = null + // exprs to resolve + let resolveExprs: $.Slice = null + $.typeSwitch(typ, [{ types: [{kind: $.TypeKind.Pointer, elemType: 'ast.IndexExpr'}], body: (typ) => { + declareExprs = $.arrayToSlice([typ!.Index]) + resolveExprs = $.append(resolveExprs, typ!.X) + }}, + { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.IndexListExpr'}], body: (typ) => { + declareExprs = typ!.Indices + resolveExprs = $.append(resolveExprs, typ!.X) + }}], () => { + resolveExprs = $.append(resolveExprs, typ) + }) + for (let _i = 0; _i < $.len(declareExprs); _i++) { + const expr = declareExprs![_i] + { + + // The receiver type parameter expression is invalid, but try to resolve + // it anyway for consistency. 
+ { + let { value: id } = $.typeAssert(expr, {kind: $.TypeKind.Pointer, elemType: 'ast.Ident'}) + if (id != null) { + r._declare(expr, null, r.topScope, ast.Typ, id) + } + else { + // The receiver type parameter expression is invalid, but try to resolve + // it anyway for consistency. + resolveExprs = $.append(resolveExprs, expr) + } + } + } + } + for (let _i = 0; _i < $.len(resolveExprs); _i++) { + const expr = resolveExprs![_i] + { + if (expr != null) { + ast.Walk(r, expr) + } + } + } + for (let _i = 0; _i < $.len($.goSlice(recv!.List, 1, undefined)); _i++) { + const f = $.goSlice(recv!.List, 1, undefined)![_i] + { + if (f!.Type != null) { + ast.Walk(r, f!.Type) + } + } + } + } + + public walkFieldList(list: ast.FieldList | null, kind: ast.ObjKind): void { + const r = this + if (list == null) { + return + } + r.resolveList(list) + r.declareList(list, kind) + } + + // walkTParams is like walkFieldList, but declares type parameters eagerly so + // that they may be resolved in the constraint expressions held in the field + // Type. + public walkTParams(list: ast.FieldList | null): void { + const r = this + r.declareList(list, ast.Typ) + r.resolveList(list) + } + + public walkBody(body: ast.BlockStmt | null): void { + const r = this + using __defer = new $.DisposableStack(); + if (body == null) { + return + } + r.openLabelScope() + __defer.defer(() => { + r.closeLabelScope() + }); + r.walkStmts(body!.List) + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'resolver', + new resolver(), + [{ name: "trace", args: [{ name: "format", type: { kind: $.TypeKind.Basic, name: "string" } }, { name: "args", type: { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Interface, methods: [] } } }], returns: [] }, { name: "sprintf", args: [{ name: "format", type: { kind: $.TypeKind.Basic, name: "string" } }, { name: "args", type: { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Interface, methods: [] } } }], returns: [{ type: { kind: $.TypeKind.Basic, name: "string" } }] }, { name: "openScope", args: [{ name: "pos", type: "Pos" }], returns: [] }, { name: "closeScope", args: [], returns: [] }, { name: "openLabelScope", args: [], returns: [] }, { name: "closeLabelScope", args: [], returns: [] }, { name: "declare", args: [{ name: "decl", type: { kind: $.TypeKind.Interface, methods: [] } }, { name: "data", type: { kind: $.TypeKind.Interface, methods: [] } }, { name: "scope", type: { kind: $.TypeKind.Pointer, elemType: "Scope" } }, { name: "kind", type: "ObjKind" }, { name: "idents", type: { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Pointer, elemType: "Ident" } } }], returns: [] }, { name: "shortVarDecl", args: [{ name: "decl", type: { kind: $.TypeKind.Pointer, elemType: "AssignStmt" } }], returns: [] }, { name: "resolve", args: [{ name: "ident", type: { kind: $.TypeKind.Pointer, elemType: "Ident" } }, { name: "collectUnresolved", type: { kind: $.TypeKind.Basic, name: "boolean" } }], returns: [] }, { name: "walkExprs", args: [{ name: "list", type: { kind: $.TypeKind.Slice, elemType: "Expr" } }], returns: [] }, { name: "walkLHS", args: [{ name: "list", type: { kind: $.TypeKind.Slice, elemType: "Expr" } }], returns: [] }, { name: "walkStmts", args: [{ name: "list", type: { kind: $.TypeKind.Slice, elemType: "Stmt" } }], returns: [] }, { name: "Visit", args: [{ name: "node", type: "Node" }], returns: [{ type: "Visitor" }] }, { name: "walkFuncType", args: [{ name: "typ", type: { kind: $.TypeKind.Pointer, elemType: "FuncType" } }], returns: [] 
}, { name: "resolveList", args: [{ name: "list", type: { kind: $.TypeKind.Pointer, elemType: "FieldList" } }], returns: [] }, { name: "declareList", args: [{ name: "list", type: { kind: $.TypeKind.Pointer, elemType: "FieldList" } }, { name: "kind", type: "ObjKind" }], returns: [] }, { name: "walkRecv", args: [{ name: "recv", type: { kind: $.TypeKind.Pointer, elemType: "FieldList" } }], returns: [] }, { name: "walkFieldList", args: [{ name: "list", type: { kind: $.TypeKind.Pointer, elemType: "FieldList" } }, { name: "kind", type: "ObjKind" }], returns: [] }, { name: "walkTParams", args: [{ name: "list", type: { kind: $.TypeKind.Pointer, elemType: "FieldList" } }], returns: [] }, { name: "walkBody", args: [{ name: "body", type: { kind: $.TypeKind.Pointer, elemType: "BlockStmt" } }], returns: [] }], + resolver, + {"handle": { kind: $.TypeKind.Pointer, elemType: "File" }, "declErr": { kind: $.TypeKind.Function, params: ["Pos", { kind: $.TypeKind.Basic, name: "string" }], results: [] }, "pkgScope": { kind: $.TypeKind.Pointer, elemType: "Scope" }, "topScope": { kind: $.TypeKind.Pointer, elemType: "Scope" }, "unresolved": { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Pointer, elemType: "Ident" } }, "depth": { kind: $.TypeKind.Basic, name: "number" }, "labelScope": { kind: $.TypeKind.Pointer, elemType: "Scope" }, "targetStack": { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Slice, elemType: { kind: $.TypeKind.Pointer, elemType: "Ident" } } }} + ); +} + +let unresolved: ast.Object | null = new ast.Object() + +// resolveFile walks the given file to resolve identifiers within the file +// scope, updating ast.Ident.Obj fields with declaration information. +// +// If declErr is non-nil, it is used to report declaration errors during +// resolution. tok is used to format position in error messages. 
+export function resolveFile(file: ast.File | null, handle: token.File | null, declErr: ((p0: token.Pos, p1: string) => void) | null): void { + let pkgScope = null + let r = new resolver({declErr: declErr, depth: 1, handle: handle, pkgScope: pkgScope, topScope: pkgScope}) + + for (let _i = 0; _i < $.len(file!.Decls); _i++) { + const decl = file!.Decls![_i] + { + ast.Walk(r, decl) + } + } + + r!.closeScope() + assert(r!.topScope == null, "unbalanced scopes") + assert(r!.labelScope == null, "unbalanced label scopes") + + // resolve global identifiers within the same file + let i = 0 + + // i <= index for current ident + + // also removes unresolved sentinel + for (let _i = 0; _i < $.len(r!.unresolved); _i++) { + const ident = r!.unresolved![_i] + { + // i <= index for current ident + assert((ident!.Obj === unresolved), "object already resolved") + ident!.Obj = r!.pkgScope!.Lookup(ident!.Name) // also removes unresolved sentinel + if (ident!.Obj == null) { + r!.unresolved![i] = ident + i++ + } + else if (false) { + let pos = $.mustTypeAssert(ident!.Obj!.Decl, {kind: $.TypeKind.Interface, methods: [{ name: 'Pos', args: [], returns: [{ type: 'token.Pos' }] }]})!.Pos() + r!.trace("resolved %s@%v to package object %v", ident!.Name, ident!.Pos(), pos) + } + } + } + file!.Scope = r!.pkgScope + file!.Unresolved = $.goSlice(r!.unresolved, 0, i) +} + diff --git a/compliance/deps/go/scanner/scanner.gs.ts b/compliance/deps/go/scanner/scanner.gs.ts index 604bd46f..cbca3909 100644 --- a/compliance/deps/go/scanner/scanner.gs.ts +++ b/compliance/deps/go/scanner/scanner.gs.ts @@ -388,7 +388,7 @@ export class Scanner { } let filename = $.bytesToString($.goSlice(text, undefined, i - 1)) // lop off ":line", and trim white space if (filename == "" && ok2) { - filename = await s.file!.Position(s.file!.Pos(offs))!.Filename + filename = (await s.file!.Position(s.file!.Pos(offs)))!.Filename } else if (filename != "") { // Put a relative filename in the current directory. diff --git a/compliance/deps/go/token/position.gs.ts b/compliance/deps/go/token/position.gs.ts index 4ad8ac14..746078ec 100644 --- a/compliance/deps/go/token/position.gs.ts +++ b/compliance/deps/go/token/position.gs.ts @@ -775,7 +775,7 @@ export class File { // p must be a [Pos] value in that file or [NoPos]. public async Line(p: Pos): Promise { const f = this - return await f.Position(p)!.Line + return (await f.Position(p))!.Line } // unpack returns the filename and line and column number for a file offset. 
diff --git a/compliance/tests/await_selector_on_call/expected.log b/compliance/tests/await_selector_on_call/expected.log
new file mode 100644
index 00000000..d81cc071
--- /dev/null
+++ b/compliance/tests/await_selector_on_call/expected.log
@@ -0,0 +1 @@
+42
diff --git a/compliance/tests/await_selector_on_call/index.ts b/compliance/tests/await_selector_on_call/index.ts
new file mode 100644
index 00000000..3280b382
--- /dev/null
+++ b/compliance/tests/await_selector_on_call/index.ts
@@ -0,0 +1,2 @@
+export { F } from "./main.gs.js"
+export { S } from "./main.gs.js"
diff --git a/compliance/tests/await_selector_on_call/main.go b/compliance/tests/await_selector_on_call/main.go
new file mode 100644
index 00000000..5f61daf6
--- /dev/null
+++ b/compliance/tests/await_selector_on_call/main.go
@@ -0,0 +1,20 @@
+package main
+
+// Minimal reproducer for selector on awaited call result
+// Ensures compiler emits (await F())!.V rather than await F()!.V
+
+type S struct{ V int }
+
+var ch = make(chan int, 1)
+
+// F is async due to channel send and returns *S
+func F() *S {
+	ch <- 1 // makes F async in TS
+	return &S{V: 42}
+}
+
+func main() {
+	// Access field on call expression base
+	// Should compile to (await F())!.V
+	println(F().V)
+}
diff --git a/compliance/tests/await_selector_on_call/main.gs.ts b/compliance/tests/await_selector_on_call/main.gs.ts
new file mode 100644
index 00000000..e2328f53
--- /dev/null
+++ b/compliance/tests/await_selector_on_call/main.gs.ts
@@ -0,0 +1,55 @@
+// Generated file based on main.go
+// Updated when compliance tests are re-run, DO NOT EDIT!
+
+import * as $ from "@goscript/builtin/index.js"
+
+export class S {
+	public get V(): number {
+		return this._fields.V.value
+	}
+	public set V(value: number) {
+		this._fields.V.value = value
+	}
+
+	public _fields: {
+		V: $.VarRef;
+	}
+
+	constructor(init?: Partial<{V?: number}>) {
+		this._fields = {
+			V: $.varRef(init?.V ??
0) + } + } + + public clone(): S { + const cloned = new S() + cloned._fields = { + V: $.varRef(this._fields.V.value) + } + return cloned + } + + // Register this type with the runtime type system + static __typeInfo = $.registerStructType( + 'S', + new S(), + [], + S, + {"V": { kind: $.TypeKind.Basic, name: "number" }} + ); +} + +let ch: $.Channel | null = $.makeChannel(1, 0, 'both') + +// F is async due to channel send and returns *S +export async function F(): Promise { + await $.chanSend(ch, 1) + return new S({V: 42}) +} + +export async function main(): Promise { + // Access field on call expression base + // Should compile to (await F())!.V + console.log((await F())!.V) +} + diff --git a/compliance/tests/await_selector_on_call/tsconfig.json b/compliance/tests/await_selector_on_call/tsconfig.json new file mode 100644 index 00000000..437e6b67 --- /dev/null +++ b/compliance/tests/await_selector_on_call/tsconfig.json @@ -0,0 +1,32 @@ +{ + "compilerOptions": { + "allowImportingTsExtensions": true, + "lib": [ + "es2022", + "esnext.disposable", + "dom" + ], + "module": "nodenext", + "moduleResolution": "nodenext", + "noEmit": true, + "paths": { + "*": [ + "./*" + ], + "@goscript/*": [ + "../../../gs/*", + "../../../compliance/deps/*" + ], + "@goscript/github.com/aperturerobotics/goscript/compliance/tests/await_selector_on_call/*": [ + "./*" + ] + }, + "sourceMap": true, + "target": "es2022" + }, + "extends": "../../../tsconfig.json", + "include": [ + "index.ts", + "main.gs.ts" + ] +} diff --git a/compliance/tests/package_import_go_parser/package_import_go_parser.gs.ts b/compliance/tests/package_import_go_parser/package_import_go_parser.gs.ts index 96c6a517..e1f276d1 100644 --- a/compliance/tests/package_import_go_parser/package_import_go_parser.gs.ts +++ b/compliance/tests/package_import_go_parser/package_import_go_parser.gs.ts @@ -1,7 +1,7 @@ // Generated file based on package_import_go_parser.go // Updated when compliance tests are re-run, DO NOT EDIT! -import * as $ from "@goscript/builtin/index.js"; +import * as $ from "@goscript/builtin/index.js" import * as ast from "@goscript/go/ast/index.js" @@ -33,14 +33,14 @@ export async function main(): Promise { // Extract type information from the parsed expression $.typeSwitch(node, [{ types: [{kind: $.TypeKind.Pointer, elemType: 'ast.BasicLit'}], body: (n) => { - results = $.append(results, "BasicLit: " + n.Value) + results = $.append(results, "BasicLit: " + n!.Value) }}, { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.BinaryExpr'}], body: (n) => { - results = $.append(results, "BinaryExpr with operator: " + token.Token_String(n.Op)) + results = $.append(results, "BinaryExpr with operator: " + token.Token_String(n!.Op)) }}, { types: [{kind: $.TypeKind.Pointer, elemType: 'ast.CallExpr'}], body: (n) => { { - let { value: ident, ok: ok } = $.typeAssert(n.Fun, {kind: $.TypeKind.Pointer, elemType: 'ast.Ident'}) + let { value: ident, ok: ok } = $.typeAssert(n!.Fun, {kind: $.TypeKind.Pointer, elemType: 'ast.Ident'}) if (ok) { results = $.append(results, "CallExpr: " + ident!.Name) } diff --git a/compliance/tests/package_import_go_parser/skip-test b/compliance/tests/package_import_go_parser/skip-test deleted file mode 100644 index 1f5fc429..00000000 --- a/compliance/tests/package_import_go_parser/skip-test +++ /dev/null @@ -1,25 +0,0 @@ -# This test is skipped because the transpilation of go/parser and related packages -# reveals multiple fundamental issues with the GoScript transpiler: -# -# ISSUES IDENTIFIED: -# 1. 
-# 1. Async propagation: Fixed - AddLineInfo now correctly marked as async when calling async AddLineColumnInfo
-# 2. Missing imports: scanner package is used but not imported in generated TypeScript
-# 3. Promise handling: Many methods use await but aren't marked as async
-# 4. Type issues: Promise types not handled correctly in return types and assignments
-# 5. Destructuring issues: Problems with array/tuple destructuring assignments
-# 6. Variable scoping: Block-scoped variable redeclaration errors
-# 7. Missing type definitions: Types like serializedFileSet are not defined
-#
-# PARTIAL FIXES COMPLETED:
-# ✅ Fixed async propagation in analysis.go - methods calling async methods are now marked async
-#
-# REMAINING WORK:
-# - Improve import detection for package-qualified function calls (scanner.ErrorList_Add)
-# - Fix Promise handling for async methods that return values
-# - Fix variable scoping and destructuring assignment generation
-# - Provide hand-written TypeScript implementations for go/parser, go/ast, go/token, go/scanner
-#   (similar to how fmt, os, and other complex packages are handled)
-#
-# RECOMMENDATION:
-# Implement hand-written TypeScript versions of these packages rather than fixing
-# all transpilation issues, as the Go standard library parser is quite complex.
\ No newline at end of file
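
Note (illustrative, not part of the patch): the scanner.gs.ts and position.gs.ts hunks above and the new await_selector_on_call test all expect the form `(await f())!.x` instead of `await f()!.x`. The sketch below shows the TypeScript precedence rule behind that requirement; the names `f` and `demo` are placeholders, not code from this repository.

    // `await` binds more loosely than member access, so `await f().x`
    // parses as `await (f().x)`: the selector is read off the Promise object,
    // not off the resolved value. Parenthesizing the awaited call fixes this.
    async function f(): Promise<{ x: number }> {
      return { x: 42 }
    }

    async function demo(): Promise<void> {
      // const bad = await f().x   // type error: 'x' does not exist on Promise<{ x: number }>
      const good = (await f()).x   // 42
      console.log(good)
    }

    void demo()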