diff --git a/internal/lsp/cmd/cmd.go b/internal/lsp/cmd/cmd.go
index 41c2bce2a2..fdaf6d1abc 100644
--- a/internal/lsp/cmd/cmd.go
+++ b/internal/lsp/cmd/cmd.go
@@ -504,13 +504,9 @@ func (c *connection) AddFile(ctx context.Context, uri span.URI) *cmdFile {
 	return file
 }
 
-func (c *connection) semanticTokens(ctx context.Context, file span.URI) (*protocol.SemanticTokens, error) {
-	p := &protocol.SemanticTokensParams{
-		TextDocument: protocol.TextDocumentIdentifier{
-			URI: protocol.URIFromSpanURI(file),
-		},
-	}
-	resp, err := c.Server.SemanticTokensFull(ctx, p)
+func (c *connection) semanticTokens(ctx context.Context, p *protocol.SemanticTokensRangeParams) (*protocol.SemanticTokens, error) {
+	// use range to avoid limits on full
+	resp, err := c.Server.SemanticTokensRange(ctx, p)
 	if err != nil {
 		return nil, err
 	}
diff --git a/internal/lsp/cmd/semantictokens.go b/internal/lsp/cmd/semantictokens.go
index 41e353cc8f..e8f9018c7d 100644
--- a/internal/lsp/cmd/semantictokens.go
+++ b/internal/lsp/cmd/semantictokens.go
@@ -94,13 +94,24 @@ func (c *semtok) Run(ctx context.Context, args ...string) error {
 		return file.err
 	}
 
-	resp, err := conn.semanticTokens(ctx, uri)
+	buf, err := ioutil.ReadFile(args[0])
 	if err != nil {
 		return err
 	}
-	buf, err := ioutil.ReadFile(args[0])
+	lines := bytes.Split(buf, []byte{'\n'})
+	p := &protocol.SemanticTokensRangeParams{
+		TextDocument: protocol.TextDocumentIdentifier{
+			URI: protocol.URIFromSpanURI(uri),
+		},
+		Range: protocol.Range{Start: protocol.Position{Line: 0, Character: 0},
+			End: protocol.Position{
+				Line:      uint32(len(lines) - 1),
+				Character: uint32(len(lines[len(lines)-1]))},
+		},
+	}
+	resp, err := conn.semanticTokens(ctx, p)
 	if err != nil {
-		log.Fatal(err)
+		return err
 	}
 	fset := token.NewFileSet()
 	f, err := parser.ParseFile(fset, args[0], buf, 0)
diff --git a/internal/lsp/semantic.go b/internal/lsp/semantic.go
index fbca581e49..8230a7c46c 100644
--- a/internal/lsp/semantic.go
+++ b/internal/lsp/semantic.go
@@ -22,6 +22,8 @@ import (
 	errors "golang.org/x/xerrors"
 )
 
+const maxFullFileSize int = 100000 // reject full semantic token requests for large files
+
 func (s *Server) semanticTokensFull(ctx context.Context, p *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) {
 	ret, err := s.computeSemanticTokens(ctx, p.TextDocument, nil)
 	return ret, err
@@ -68,6 +70,11 @@ func (s *Server) computeSemanticTokens(ctx context.Context, td protocol.TextDocu
 	if pgf.ParseErr != nil {
 		return nil, pgf.ParseErr
 	}
+	if rng == nil && len(pgf.Src) > maxFullFileSize {
+		err := fmt.Errorf("semantic tokens: file %s too large for full (%d>%d)",
+			td.URI.SpanURI().Filename(), len(pgf.Src), maxFullFileSize)
+		return nil, err
+	}
 	e := &encoded{
 		ctx:      ctx,
 		pgf:      pgf,
@@ -491,7 +498,7 @@ func (e *encoded) init() error {
 	}
 	span, err := e.pgf.Mapper.RangeSpan(*e.rng)
 	if err != nil {
-		return errors.Errorf("range span error for %s", e.pgf.File.Name)
+		return errors.Errorf("range span (%v) error for %s", err, e.pgf.File.Name)
 	}
 	e.end = e.start + token.Pos(span.End().Offset())
 	e.start += token.Pos(span.Start().Offset())