Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
48 changes: 47 additions & 1 deletion internal/ast/ast.go
Original file line number Diff line number Diff line change
Expand Up @@ -10781,6 +10781,7 @@ type SourceFile struct {

tokenCacheMu sync.Mutex
tokenCache map[core.TextRange]*Node
tokenFactory *NodeFactory
declarationMapMu sync.Mutex
declarationMap map[string][]*Node
}
Expand Down Expand Up @@ -10942,6 +10943,7 @@ func (node *SourceFile) GetOrCreateToken(
pos int,
end int,
parent *Node,
flags TokenFlags,
) *TokenNode {
node.tokenCacheMu.Lock()
defer node.tokenCacheMu.Unlock()
Expand All @@ -10959,13 +10961,57 @@ func (node *SourceFile) GetOrCreateToken(
return token
}

token := newNode(kind, &Token{}, NodeFactoryHooks{})
token := createToken(kind, node, pos, end, flags)
token.Loc = loc
token.Parent = parent
node.tokenCache[loc] = token
return token
}

// `kind` should be a token kind.
// createToken materializes a token node of the given kind for the source range
// [pos, end), stamping literal kinds with the relevant subset of `flags`.
// The factory is created lazily; callers must already hold file.tokenCacheMu
// (GetOrCreateToken does), which also serializes this lazy initialization.
func createToken(kind Kind, file *SourceFile, pos, end int, flags TokenFlags) *Node {
	if file.tokenFactory == nil {
		file.tokenFactory = NewNodeFactory(NodeFactoryHooks{})
	}
	// Synthetic tokens (e.g. those the change tracker creates for text to be
	// inserted) may carry positions that lie outside the current file text.
	// Guard the slice so such callers get empty text instead of a
	// "slice bounds out of range" panic; text is unused for the default
	// (punctuation/keyword) case anyway.
	var text string
	if 0 <= pos && pos <= end && end <= len(file.text) {
		text = file.text[pos:end]
	}
	switch kind {
	case KindNumericLiteral:
		literal := file.tokenFactory.NewNumericLiteral(text)
		literal.AsNumericLiteral().TokenFlags = flags & TokenFlagsNumericLiteralFlags
		return literal
	case KindBigIntLiteral:
		literal := file.tokenFactory.NewBigIntLiteral(text)
		literal.AsBigIntLiteral().TokenFlags = flags & TokenFlagsNumericLiteralFlags
		return literal
	case KindStringLiteral:
		literal := file.tokenFactory.NewStringLiteral(text)
		literal.AsStringLiteral().TokenFlags = flags & TokenFlagsStringLiteralFlags
		return literal
	case KindJsxText, KindJsxTextAllWhiteSpaces:
		return file.tokenFactory.NewJsxText(text, kind == KindJsxTextAllWhiteSpaces)
	case KindRegularExpressionLiteral:
		literal := file.tokenFactory.NewRegularExpressionLiteral(text)
		literal.AsRegularExpressionLiteral().TokenFlags = flags & TokenFlagsRegularExpressionLiteralFlags
		return literal
	case KindNoSubstitutionTemplateLiteral:
		literal := file.tokenFactory.NewNoSubstitutionTemplateLiteral(text)
		literal.AsNoSubstitutionTemplateLiteral().TokenFlags = flags & TokenFlagsTemplateLiteralLikeFlags
		return literal
	case KindTemplateHead:
		return file.tokenFactory.NewTemplateHead(text, "" /*rawText*/, flags&TokenFlagsTemplateLiteralLikeFlags)
	case KindTemplateMiddle:
		return file.tokenFactory.NewTemplateMiddle(text, "" /*rawText*/, flags&TokenFlagsTemplateLiteralLikeFlags)
	case KindTemplateTail:
		return file.tokenFactory.NewTemplateTail(text, "" /*rawText*/, flags&TokenFlagsTemplateLiteralLikeFlags)
	case KindIdentifier:
		return file.tokenFactory.NewIdentifier(text)
	case KindPrivateIdentifier:
		return file.tokenFactory.NewPrivateIdentifier(text)
	default: // Punctuation and keywords
		return file.tokenFactory.NewToken(kind)
	}
}

// IsSourceFile reports whether node is a SourceFile node.
func IsSourceFile(node *Node) bool {
	switch node.Kind {
	case KindSourceFile:
		return true
	default:
		return false
	}
}
Expand Down
20 changes: 13 additions & 7 deletions internal/astnav/tokens.go
Original file line number Diff line number Diff line change
Expand Up @@ -187,17 +187,18 @@ func getTokenAtPosition(
tokenFullStart := scanner.TokenFullStart()
tokenStart := core.IfElse(allowPositionInLeadingTrivia, tokenFullStart, scanner.TokenStart())
tokenEnd := scanner.TokenEnd()
flags := scanner.TokenFlags()
if tokenStart <= position && (position < tokenEnd) {
if token == ast.KindIdentifier || !ast.IsTokenKind(token) {
if ast.IsJSDocKind(current.Kind) {
return current
}
panic(fmt.Sprintf("did not expect %s to have %s in its trivia", current.Kind.String(), token.String()))
}
return sourceFile.GetOrCreateToken(token, tokenFullStart, tokenEnd, current)
return sourceFile.GetOrCreateToken(token, tokenFullStart, tokenEnd, current, flags)
}
if includePrecedingTokenAtEndPosition != nil && tokenEnd == position {
prevToken := sourceFile.GetOrCreateToken(token, tokenFullStart, tokenEnd, current)
prevToken := sourceFile.GetOrCreateToken(token, tokenFullStart, tokenEnd, current, flags)
if includePrecedingTokenAtEndPosition(prevToken) {
return prevToken
}
Expand Down Expand Up @@ -514,7 +515,8 @@ func findRightmostValidToken(endPos int, sourceFile *ast.SourceFile, containingN
tokenFullStart := scanner.TokenFullStart()
tokenEnd := scanner.TokenEnd()
startPos = tokenEnd
tokens = append(tokens, sourceFile.GetOrCreateToken(token, tokenFullStart, tokenEnd, n))
flags := scanner.TokenFlags()
tokens = append(tokens, sourceFile.GetOrCreateToken(token, tokenFullStart, tokenEnd, n, flags))
scanner.Scan()
}
startPos = visitedNode.End()
Expand All @@ -531,7 +533,8 @@ func findRightmostValidToken(endPos int, sourceFile *ast.SourceFile, containingN
tokenFullStart := scanner.TokenFullStart()
tokenEnd := scanner.TokenEnd()
startPos = tokenEnd
tokens = append(tokens, sourceFile.GetOrCreateToken(token, tokenFullStart, tokenEnd, n))
flags := scanner.TokenFlags()
tokens = append(tokens, sourceFile.GetOrCreateToken(token, tokenFullStart, tokenEnd, n, flags))
scanner.Scan()
}

Expand Down Expand Up @@ -616,8 +619,9 @@ func FindNextToken(previousToken *ast.Node, parent *ast.Node, file *ast.SourceFi
tokenFullStart := scanner.TokenFullStart()
tokenStart := scanner.TokenStart()
tokenEnd := scanner.TokenEnd()
flags := scanner.TokenFlags()
if tokenStart == previousToken.End() {
return file.GetOrCreateToken(token, tokenFullStart, tokenEnd, n)
return file.GetOrCreateToken(token, tokenFullStart, tokenEnd, n, flags)
}
panic(fmt.Sprintf("Expected to find next token at %d, got token %s at %d", previousToken.End(), token, tokenStart))
}
Expand Down Expand Up @@ -690,7 +694,8 @@ func FindChildOfKind(containingNode *ast.Node, kind ast.Kind, sourceFile *ast.So
tokenKind := scan.Token()
tokenFullStart := scan.TokenFullStart()
tokenEnd := scan.TokenEnd()
token := sourceFile.GetOrCreateToken(tokenKind, tokenFullStart, tokenEnd, containingNode)
flags := scan.TokenFlags()
token := sourceFile.GetOrCreateToken(tokenKind, tokenFullStart, tokenEnd, containingNode, flags)
if tokenKind == kind {
foundChild = token
return true
Expand Down Expand Up @@ -720,7 +725,8 @@ func FindChildOfKind(containingNode *ast.Node, kind ast.Kind, sourceFile *ast.So
tokenKind := scan.Token()
tokenFullStart := scan.TokenFullStart()
tokenEnd := scan.TokenEnd()
token := sourceFile.GetOrCreateToken(tokenKind, tokenFullStart, tokenEnd, containingNode)
flags := scan.TokenFlags()
token := sourceFile.GetOrCreateToken(tokenKind, tokenFullStart, tokenEnd, containingNode, flags)
if tokenKind == kind {
return token
}
Expand Down
25 changes: 25 additions & 0 deletions internal/fourslash/tests/completionsUnterminatedLiteral_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
package fourslash_test

import (
"testing"

"github.com/microsoft/typescript-go/internal/fourslash"
. "github.com/microsoft/typescript-go/internal/fourslash/tests/util"
"github.com/microsoft/typescript-go/internal/testutil"
)

// TestCompletionsUnterminatedLiteral verifies that requesting completions at a
// marker positioned inside an unterminated string literal does not panic and
// yields an empty completion list. This exercises the token-creation path for
// literal tokens (which slices file text and applies scanner TokenFlags).
func TestCompletionsUnterminatedLiteral(t *testing.T) {
	t.Parallel()

	// Convert an unexpected panic during the fourslash run into a test failure.
	defer testutil.RecoverAndFail(t, "Panic on fourslash test")
	// /*1*/ marks the cursor position immediately after the unterminated `"`.
	const content = `// @noLib: true
function foo(a"/*1*/`
	f, done := fourslash.NewFourslash(t, nil /*capabilities*/, content)
	defer done()
	// Expect no completion items at the marker, only the default commit characters.
	f.VerifyCompletions(t, "1", &fourslash.CompletionsExpectedList{
		ItemDefaults: &fourslash.CompletionsExpectedItemDefaults{
			CommitCharacters: &DefaultCommitCharacters,
		},
		Items: &fourslash.CompletionsExpectedItems{},
	})
}
6 changes: 3 additions & 3 deletions internal/ls/change/tracker.go
Original file line number Diff line number Diff line change
Expand Up @@ -178,7 +178,7 @@ func (t *Tracker) InsertNodeBefore(sourceFile *ast.SourceFile, before *ast.Node,
// InsertModifierBefore inserts a modifier token (like 'type') before a node with a trailing space.
func (t *Tracker) InsertModifierBefore(sourceFile *ast.SourceFile, modifier ast.Kind, before *ast.Node) {
pos := astnav.GetStartOfNode(before, sourceFile, false)
token := sourceFile.GetOrCreateToken(modifier, pos, pos, before.Parent)
token := sourceFile.GetOrCreateToken(modifier, pos, pos, before.Parent, ast.TokenFlagsNone)
t.InsertNodeAt(sourceFile, core.TextPos(pos), token, NodeOptions{Suffix: " "})
}

Expand Down Expand Up @@ -262,7 +262,7 @@ func (t *Tracker) endPosForInsertNodeAfter(sourceFile *ast.SourceFile, after *as
endPos := t.converters.PositionToLineAndCharacter(sourceFile, core.TextPos(after.End()))
t.ReplaceRange(sourceFile,
lsproto.Range{Start: endPos, End: endPos},
sourceFile.GetOrCreateToken(ast.KindSemicolonToken, after.End(), after.End(), after.Parent),
sourceFile.GetOrCreateToken(ast.KindSemicolonToken, after.End(), after.End(), after.Parent, ast.TokenFlagsNone),
NodeOptions{},
)
}
Expand Down Expand Up @@ -347,7 +347,7 @@ func (t *Tracker) InsertNodeInListAfter(sourceFile *ast.SourceFile, after *ast.N

// insert separator immediately following the 'after' node to preserve comments in trailing trivia
// !!! formatcontext
t.ReplaceRange(sourceFile, lsproto.Range{Start: end, End: end}, sourceFile.GetOrCreateToken(separator, after.End(), after.End()+len(separatorString), after.Parent), NodeOptions{})
t.ReplaceRange(sourceFile, lsproto.Range{Start: end, End: end}, sourceFile.GetOrCreateToken(separator, after.End(), after.End()+len(separatorString), after.Parent, ast.TokenFlagsNone), NodeOptions{})
Copy link

Copilot AI Dec 10, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This creates a synthetic token with end position after.End()+len(separatorString), which extends beyond the actual text currently in the source file. The new createToken function (added in this PR at internal/ast/ast.go line 10976) performs text := file.text[pos:end], which will panic with "slice bounds out of range" when end > len(file.text).

To fix this, createToken needs to handle cases where positions extend beyond the source text. Add bounds checking:

func createToken(kind Kind, file *SourceFile, pos, end int, flags TokenFlags) *Node {
	if file.tokenFactory == nil {
		file.tokenFactory = NewNodeFactory(NodeFactoryHooks{})
	}
	var text string
	if pos >= 0 && end <= len(file.text) && pos <= end {
		text = file.text[pos:end]
	}
	// For punctuation/keywords (default case), text isn't used anyway
	// For literals/identifiers created with invalid bounds, this is a programming error
	// that should be caught, but we can default to empty text for robustness
	switch kind {
	// ... rest of the cases
	}
}

Copilot uses AI. Check for mistakes.
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I noticed this too, will follow up on it in a separate PR: the change tracker should not be calling file.GetOrCreateToken() at all to create synthetic tokens.

// use the same indentation as 'after' item
indentation := format.FindFirstNonWhitespaceColumn(afterStartLinePosition, afterStart, sourceFile, t.formatSettings)
// insert element before the line break on the line that contains 'after' element
Expand Down
4 changes: 2 additions & 2 deletions internal/ls/lsutil/children.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ func GetLastChild(node *ast.Node, sourceFile *ast.SourceFile) *ast.Node {
tokenKind := scanner.Token()
tokenFullStart := scanner.TokenFullStart()
tokenEnd := scanner.TokenEnd()
lastToken = sourceFile.GetOrCreateToken(tokenKind, tokenFullStart, tokenEnd, node)
lastToken = sourceFile.GetOrCreateToken(tokenKind, tokenFullStart, tokenEnd, node, scanner.TokenFlags())
startPos = tokenEnd
scanner.Scan()
}
Expand Down Expand Up @@ -108,7 +108,7 @@ func GetFirstToken(node *ast.Node, sourceFile *ast.SourceFile) *ast.Node {
tokenKind := scanner.Token()
tokenFullStart := scanner.TokenFullStart()
tokenEnd := scanner.TokenEnd()
firstToken = sourceFile.GetOrCreateToken(tokenKind, tokenFullStart, tokenEnd, node)
firstToken = sourceFile.GetOrCreateToken(tokenKind, tokenFullStart, tokenEnd, node, scanner.TokenFlags())
}

if firstToken != nil {
Expand Down
2 changes: 1 addition & 1 deletion internal/ls/signaturehelp.go
Original file line number Diff line number Diff line change
Expand Up @@ -1241,7 +1241,7 @@ func getTokenFromNodeList(nodeList *ast.NodeList, nodeListParent *ast.Node, sour
token := scanner.Token()
tokenFullStart := scanner.TokenFullStart()
tokenEnd := scanner.TokenEnd()
tokens = append(tokens, sourceFile.GetOrCreateToken(token, tokenFullStart, tokenEnd, nodeListParent))
tokens = append(tokens, sourceFile.GetOrCreateToken(token, tokenFullStart, tokenEnd, nodeListParent, scanner.TokenFlags()))
left = tokenEnd
}
}
Expand Down
6 changes: 3 additions & 3 deletions internal/ls/utilities.go
Original file line number Diff line number Diff line change
Expand Up @@ -752,7 +752,7 @@ func nodeEndsWith(n *ast.Node, expectedLastToken ast.Kind, sourceFile *ast.Sourc
tokenKind := scanner.Token()
tokenFullStart := scanner.TokenFullStart()
tokenEnd := scanner.TokenEnd()
token := sourceFile.GetOrCreateToken(tokenKind, tokenFullStart, tokenEnd, n)
token := sourceFile.GetOrCreateToken(tokenKind, tokenFullStart, tokenEnd, n, scanner.TokenFlags())
lastNodeAndTokens = append(lastNodeAndTokens, token)
startPos = tokenEnd
scanner.Scan()
Expand Down Expand Up @@ -1571,7 +1571,7 @@ func getChildrenFromNonJSDocNode(node *ast.Node, sourceFile *ast.SourceFile) []*
token := scanner.Token()
tokenFullStart := scanner.TokenFullStart()
tokenEnd := scanner.TokenEnd()
children = append(children, sourceFile.GetOrCreateToken(token, tokenFullStart, tokenEnd, node))
children = append(children, sourceFile.GetOrCreateToken(token, tokenFullStart, tokenEnd, node, scanner.TokenFlags()))
pos = tokenEnd
scanner.Scan()
}
Expand All @@ -1583,7 +1583,7 @@ func getChildrenFromNonJSDocNode(node *ast.Node, sourceFile *ast.SourceFile) []*
token := scanner.Token()
tokenFullStart := scanner.TokenFullStart()
tokenEnd := scanner.TokenEnd()
children = append(children, sourceFile.GetOrCreateToken(token, tokenFullStart, tokenEnd, node))
children = append(children, sourceFile.GetOrCreateToken(token, tokenFullStart, tokenEnd, node, scanner.TokenFlags()))
pos = tokenEnd
scanner.Scan()
}
Expand Down