internal/lsp/semantic: avoid doing semantic tokens for large files

LSP (and gopls) support both full-file semantic token requests and
requests for just a range, typically covering roughly what is visible
to the user. Producing the full token set for a very large file can be
slow, so gopls now responds with an error if the file is bigger than
100,000 bytes. After receiving this error, vscode, at least, stops
issuing full-file requests and falls back to range requests, roughly as
sketched below.
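
A minimal sketch of that client-side fallback (illustrative only: the
client type and tokensFor helper are invented, not vscode or gopls
code):

	func (c *client) tokensFor(ctx context.Context, uri protocol.DocumentURI, visible protocol.Range) (*protocol.SemanticTokens, error) {
		// First try a full-file request.
		full, err := c.server.SemanticTokensFull(ctx, &protocol.SemanticTokensParams{
			TextDocument: protocol.TextDocumentIdentifier{URI: uri},
		})
		if err == nil {
			return full, nil
		}
		// The server refused (for example, the file exceeds the size
		// limit below): retry with just the visible range.
		return c.server.SemanticTokensRange(ctx, &protocol.SemanticTokensRangeParams{
			TextDocument: protocol.TextDocumentIdentifier{URI: uri},
			Range:        visible,
		})
	}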

Alternatively, the server capabilities could declare that gopls never
answers full-file requests, but that does not stop vscode from sending
them. Another possibility would be to set a time limit (say 8ms) on
generating full-file semantic tokens. That is tricky to get right, but
one could approximate it by returning an error when there are more
than 4,000 semantic tokens (on my laptop, that takes about 8ms); a
sketch of that alternative follows.
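
For comparison, the token-budget alternative might look like this
(hypothetical names; this change implements the byte-size check
instead):

	const maxFullFileTokens = 4000 // about 8ms of encoding work on the laptop above

	// checkTokenBudget would run as tokens are accumulated; full-file
	// requests fail once the budget is exhausted, range requests never do.
	func checkTokenBudget(fullFileRequest bool, tokensSoFar int) error {
		if fullFileRequest && tokensSoFar > maxFullFileTokens {
			return fmt.Errorf("semantic tokens: more than %d tokens in a full-file request", maxFullFileTokens)
		}
		return nil
	}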

Large files are unusual; a simple size limit seems adequate for now.

Change-Id: Ieea0d16aad6e37cc4f14b1a6a7116a4e41197aae
Reviewed-on: https://go-review.googlesource.com/c/tools/+/307729
Run-TryBot: Peter Weinberger <pjw@google.com>
gopls-CI: kokoro <noreply+kokoro@google.com>
TryBot-Result: Go Bot <gobot@golang.org>
Trust: Peter Weinberger <pjw@google.com>
Reviewed-by: Rebecca Stambler <rstambler@golang.org>
pjw authored 2021-04-06 09:50:52 -04:00, committed by Peter Weinberger
parent a13dbf1ae0
commit dbc8747628
3 changed files with 25 additions and 11 deletions

internal/lsp/cmd/cmd.go

@@ -504,13 +504,9 @@ func (c *connection) AddFile(ctx context.Context, uri span.URI) *cmdFile {
 	return file
 }
 
-func (c *connection) semanticTokens(ctx context.Context, file span.URI) (*protocol.SemanticTokens, error) {
-	p := &protocol.SemanticTokensParams{
-		TextDocument: protocol.TextDocumentIdentifier{
-			URI: protocol.URIFromSpanURI(file),
-		},
-	}
-	resp, err := c.Server.SemanticTokensFull(ctx, p)
+func (c *connection) semanticTokens(ctx context.Context, p *protocol.SemanticTokensRangeParams) (*protocol.SemanticTokens, error) {
+	// use range to avoid limits on full
+	resp, err := c.Server.SemanticTokensRange(ctx, p)
 	if err != nil {
 		return nil, err
 	}

internal/lsp/cmd/semantictokens.go

@@ -94,13 +94,24 @@ func (c *semtok) Run(ctx context.Context, args ...string) error {
 		return file.err
 	}
-	resp, err := conn.semanticTokens(ctx, uri)
+	buf, err := ioutil.ReadFile(args[0])
 	if err != nil {
 		return err
 	}
-	buf, err := ioutil.ReadFile(args[0])
+	lines := bytes.Split(buf, []byte{'\n'})
+	p := &protocol.SemanticTokensRangeParams{
+		TextDocument: protocol.TextDocumentIdentifier{
+			URI: protocol.URIFromSpanURI(uri),
+		},
+		Range: protocol.Range{Start: protocol.Position{Line: 0, Character: 0},
+			End: protocol.Position{
+				Line:      uint32(len(lines) - 1),
+				Character: uint32(len(lines[len(lines)-1]))},
+		},
+	}
+	resp, err := conn.semanticTokens(ctx, p)
 	if err != nil {
-		log.Fatal(err)
+		return err
 	}
 	fset := token.NewFileSet()
 	f, err := parser.ParseFile(fset, args[0], buf, 0)
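
A note on the End position computed above: bytes.Split on '\n' returns
one element per line plus a final element after the last newline, so
for a file ending in a newline the range ends at column 0 of the line
past the last one. A standalone worked example (a sketch, assuming the
bytes and protocol imports):

	buf := []byte("package p\n\nvar x int\n")
	lines := bytes.Split(buf, []byte{'\n'})
	// lines is ["package p", "", "var x int", ""]: four elements,
	// the last one empty because the file ends in a newline.
	end := protocol.Position{
		Line:      uint32(len(lines) - 1),           // 3
		Character: uint32(len(lines[len(lines)-1])), // 0, the empty final line
	}
	// The range {0,0}..{3,0} therefore covers the whole file.
	_ = end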

internal/lsp/semantic.go

@@ -22,6 +22,8 @@ import (
 	errors "golang.org/x/xerrors"
 )
 
+const maxFullFileSize int = 100000 // reject full semantic token requests for large files
+
 func (s *Server) semanticTokensFull(ctx context.Context, p *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) {
 	ret, err := s.computeSemanticTokens(ctx, p.TextDocument, nil)
 	return ret, err
@@ -68,6 +70,11 @@ func (s *Server) computeSemanticTokens(ctx context.Context, td protocol.TextDocu
 	if pgf.ParseErr != nil {
 		return nil, pgf.ParseErr
 	}
+	if rng == nil && len(pgf.Src) > maxFullFileSize {
+		err := fmt.Errorf("semantic tokens: file %s too large for full (%d>%d)",
+			td.URI.SpanURI().Filename(), len(pgf.Src), maxFullFileSize)
+		return nil, err
+	}
 	e := &encoded{
 		ctx: ctx,
 		pgf: pgf,
@@ -491,7 +498,7 @@ func (e *encoded) init() error {
 	}
 	span, err := e.pgf.Mapper.RangeSpan(*e.rng)
 	if err != nil {
-		return errors.Errorf("range span error for %s", e.pgf.File.Name)
+		return errors.Errorf("range span (%v) error for %s", err, e.pgf.File.Name)
 	}
 	e.end = e.start + token.Pos(span.End().Offset())
 	e.start += token.Pos(span.Start().Offset())
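
A hypothetical test sketch of the new guard (the test and the
computeTokensForSource helper are invented for illustration; a nil
range stands for a full-file request, as in computeSemanticTokens):

	func TestFullRequestTooLarge(t *testing.T) {
		// 7,000 16-byte lines = 112,000 bytes, safely over maxFullFileSize.
		src := bytes.Repeat([]byte("// padding line\n"), 7000)
		if _, err := computeTokensForSource(t, src, nil); err == nil {
			t.Fatalf("full request for a %d-byte file: expected an error, got none", len(src))
		}
	}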