gopls/semantic: semantic tokens for type parameters

Fixes confusion in the code for recognizing type parameters. Also, in

    f := func() {}; defer f()

the second f is now tagged as a function.

Fixes golang/go#56529
Change-Id: Ida37e7fb1caa0bec86376cc24ebfad5c1228a905
Reviewed-on: https://go-review.googlesource.com/c/tools/+/448375
TryBot-Result: Gopher Robot <gobot@golang.org>
gopls-CI: kokoro <noreply+kokoro@google.com>
Run-TryBot: Peter Weinberger <pjw@google.com>
Reviewed-by: Robert Findley <rfindley@google.com>
commit 89856a4c91
parent 6e8da3febb
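As context before the diff, here is a small hypothetical Go program (not part of this change) showing the two behaviors the commit message describes: uses of the type parameter T should be reported as typeParameter tokens, and ff in "defer ff()", a variable of function type, is now reported as a function token. The names Min and ff are invented for the example.

// Hypothetical example source, not taken from this CL.
package main

import "fmt"

// Uses of the type parameter T should be tagged as typeParameter tokens
// rather than being confused with ordinary parameters or types.
func Min[T int | float64](a, b T) T {
    if a < b {
        return a
    }
    return b
}

func main() {
    ff := func() { fmt.Println("deferred") }
    defer ff() // this ff is now tagged as a function token
    fmt.Println(Min(1, 2))
}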
@@ -100,7 +100,7 @@ func (s *Server) computeSemanticTokens(ctx context.Context, td protocol.TextDocu
    if err != nil {
        return nil, err
    }
    // ignore pgf.ParseErr. Do what we can.

    if rng == nil && len(pgf.Src) > maxFullFileSize {
        err := fmt.Errorf("semantic tokens: file %s too large for full (%d>%d)",
            fh.URI().Filename(), len(pgf.Src), maxFullFileSize)
@@ -180,7 +180,7 @@ const (
func (e *encoded) token(start token.Pos, leng int, typ tokenType, mods []string) {

    if !start.IsValid() {
-        // This is not worth reporting
+        // This is not worth reporting. TODO(pjw): does it still happen?
        return
    }
    if start >= e.end || start+token.Pos(leng) <= e.start {
@@ -296,7 +296,7 @@ func (e *encoded) inspector(n ast.Node) bool {
        e.token(x.TokPos, len(x.Tok.String()), tokOperator, nil)
    case *ast.BasicLit:
        if strings.Contains(x.Value, "\n") {
-            // has to be a string
+            // has to be a string.
            e.multiline(x.Pos(), x.End(), x.Value, tokString)
            break
        }
@@ -379,7 +379,7 @@ func (e *encoded) inspector(n ast.Node) bool {
    case *ast.IncDecStmt:
        e.token(x.TokPos, len(x.Tok.String()), tokOperator, nil)
    case *ast.IndexExpr:
-    case *typeparams.IndexListExpr: // accommodate generics
+    case *typeparams.IndexListExpr:
    case *ast.InterfaceType:
        e.token(x.Interface, len("interface"), tokKeyword, nil)
    case *ast.KeyValueExpr:
@@ -523,8 +523,6 @@ func (e *encoded) ident(x *ast.Ident) {
    case *types.Var:
        if isSignature(y) {
            tok(x.Pos(), len(x.Name), tokFunction, nil)
-        } else if _, ok := y.Type().(*typeparams.TypeParam); ok {
-            tok(x.Pos(), len(x.Name), tokTypeParam, nil)
        } else if e.isParam(use.Pos()) {
            // variable, unless use.pos is the pos of a Field in an ancestor FuncDecl
            // or FuncLit and then it's a parameter
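The *types.Var case above appears to be where a parameter whose declared type is a type parameter (target T in the new regression test below) was previously misclassified. As background, a standalone sketch of how go/types reports this situation; it uses the standard go/types API directly rather than gopls' internal typeparams package, and the source string is invented for the example.

package main

import (
    "fmt"
    "go/ast"
    "go/parser"
    "go/token"
    "go/types"
)

// Invented example source: T is a type parameter, target and l are ordinary
// parameters whose declared types mention T.
const src = `package p
func Add[T int](target T, l []T) []T { return append(l, target) }
`

func main() {
    fset := token.NewFileSet()
    f, err := parser.ParseFile(fset, "p.go", src, 0)
    if err != nil {
        panic(err)
    }
    info := &types.Info{Uses: make(map[*ast.Ident]types.Object)}
    // No Importer is needed: the example package imports nothing.
    if _, err := (&types.Config{}).Check("p", fset, []*ast.File{f}, info); err != nil {
        panic(err)
    }
    for id, obj := range info.Uses {
        // A use of T resolves to a *types.TypeName; a use of target resolves
        // to a *types.Var. Both have a *types.TypeParam as their type, which
        // is why classifying on the type alone is not enough.
        if _, ok := obj.Type().(*types.TypeParam); ok {
            fmt.Printf("%s: %s is a %T whose type is a type parameter\n",
                fset.Position(id.Pos()), id.Name, obj)
        }
    }
}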
@@ -572,9 +570,6 @@ func (e *encoded) isParam(pos token.Pos) bool {
}

func isSignature(use types.Object) bool {
-    if true {
-        return false //PJW: fix after generics seem ok
-    }
    if _, ok := use.(*types.Var); !ok {
        return false
    }
@@ -606,7 +601,7 @@ func (e *encoded) unkIdent(x *ast.Ident) (tokenType, []string) {
        *ast.IfStmt,       /* condition */
        *ast.KeyValueExpr: // either key or value
        return tokVariable, nil
-    case *typeparams.IndexListExpr: // generic?
+    case *typeparams.IndexListExpr:
        return tokVariable, nil
    case *ast.Ellipsis:
        return tokType, nil
@@ -726,7 +721,7 @@ func isDeprecated(n *ast.CommentGroup) bool {

func (e *encoded) definitionFor(x *ast.Ident, def types.Object) (tokenType, []string) {
    // PJW: def == types.Label? probably a nothing
-    // PJW: look into replaceing these syntactic tests with types more generally
+    // PJW: look into replacing these syntactic tests with types more generally
    mods := []string{"definition"}
    for i := len(e.stack) - 1; i >= 0; i-- {
        s := e.stack[i]
@@ -761,7 +756,10 @@ func (e *encoded) definitionFor(x *ast.Ident, def types.Object) (tokenType, []st
            }
            // if x < ... < FieldList < FuncType < FuncDecl, this is a param
            return tokParameter, mods
-        case *ast.FuncType:
+        case *ast.FuncType: // is it in the TypeParams?
+            if isTypeParam(x, y) {
+                return tokTypeParam, mods
+            }
            return tokParameter, mods
        case *ast.InterfaceType:
            return tokMethod, mods
@@ -793,6 +791,21 @@ func (e *encoded) definitionFor(x *ast.Ident, def types.Object) (tokenType, []st
    return "", []string{""}
}

+func isTypeParam(x *ast.Ident, y *ast.FuncType) bool {
+    tp := typeparams.ForFuncType(y)
+    if tp == nil {
+        return false
+    }
+    for _, p := range tp.List {
+        for _, n := range p.Names {
+            if x == n {
+                return true
+            }
+        }
+    }
+    return false
+}
+
func (e *encoded) multiline(start, end token.Pos, val string, tok tokenType) {
    f := e.fset.File(start)
    // the hard part is finding the lengths of lines. include the \n
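The added isTypeParam helper answers a narrow question: is this identifier one of the names declared in the function type's type-parameter list? Below is a standalone sketch of the same pointer-identity test written against ast.FuncType.TypeParams directly (gopls goes through its typeparams shim so it still builds before Go 1.18); the source string and the helper name declaresTypeParam are invented for the example.

package main

import (
    "fmt"
    "go/ast"
    "go/parser"
    "go/token"
)

// Invented example: Add declares the type parameter T and then uses it.
const src = `package p
func Add[T int](target T, l []T) []T { return append(l, target) }
`

// declaresTypeParam reports whether id is one of the identifiers declared in
// ft's type-parameter list, using the same pointer-identity comparison as the
// new isTypeParam helper.
func declaresTypeParam(id *ast.Ident, ft *ast.FuncType) bool {
    if ft.TypeParams == nil {
        return false
    }
    for _, field := range ft.TypeParams.List {
        for _, name := range field.Names {
            if id == name {
                return true
            }
        }
    }
    return false
}

func main() {
    fset := token.NewFileSet()
    f, err := parser.ParseFile(fset, "p.go", src, 0)
    if err != nil {
        panic(err)
    }
    fn := f.Decls[0].(*ast.FuncDecl)
    // Prints true for the identifier that declares T and false for the other
    // occurrences of T in the parameter and result types.
    ast.Inspect(fn, func(n ast.Node) bool {
        if id, ok := n.(*ast.Ident); ok && id.Name == "T" {
            fmt.Printf("%s: declared here? %v\n", fset.Position(id.Pos()), declaresTypeParam(id, fn.Type))
        }
        return true
    })
}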
@@ -940,6 +953,7 @@ func SemType(n int) string {
    if n >= 0 && n < len(tokTypes) {
        return tokTypes[n]
    }
    // not found for some reason
    return fmt.Sprintf("?%d[%d,%d]?", n, len(tokTypes), len(tokMods))
}
@@ -63,7 +63,7 @@

/*⇒4,keyword,[]*/func /*⇒1,function,[definition]*/g(/*⇒2,parameter,[definition]*/vv /*⇒3,operator,[]*/.../*⇒9,keyword,[]*/interface{}) {
    /*⇒2,variable,[definition]*/ff /*⇒2,operator,[]*/:= /*⇒4,keyword,[]*/func() {}
-    /*⇒5,keyword,[]*/defer /*⇒2,variable,[]*/ff()
+    /*⇒5,keyword,[]*/defer /*⇒2,function,[]*/ff()
    /*⇒2,keyword,[]*/go /*⇒3,namespace,[]*/utf./*⇒9,function,[]*/RuneCount(/*⇒2,string,[]*/"")
    /*⇒2,keyword,[]*/go /*⇒4,namespace,[]*/utf8./*⇒9,function,[]*/RuneCount(/*⇒2,parameter,[]*/vv.(/*⇒6,type,[]*/string))
    /*⇒2,keyword,[]*/if /*⇒4,variable,[readonly]*/true {
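For readers unfamiliar with these golden files: each /*⇒length,type,[modifiers]*/ comment appears to describe the token that immediately follows it, so the changed line records that the two-character token ff after defer is now reported as a function rather than a variable. An invented line in the same notation:

/*⇒3,keyword,[]*/var /*⇒1,variable,[definition]*/x /*⇒3,type,[defaultLibrary]*/int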
@@ -5,10 +5,14 @@
package misc

import (
+    "strings"
    "testing"

+    "github.com/google/go-cmp/cmp"
+    "golang.org/x/tools/gopls/internal/lsp"
+    "golang.org/x/tools/gopls/internal/lsp/protocol"
    . "golang.org/x/tools/gopls/internal/lsp/regtest"
    "golang.org/x/tools/internal/typeparams"
)

func TestBadURICrash_VSCodeIssue1498(t *testing.T) {
@@ -40,3 +44,113 @@ func main() {}
        }
    })
}
+
+// fix bug involving type parameters and regular parameters
+// (golang/vscode-go#2527)
+func TestSemantic_2527(t *testing.T) {
+    if !typeparams.Enabled {
+        t.Skip("type parameters are needed for this test")
+    }
+    // these are the expected types of identfiers in textt order
+    want := []result{
+        {"package", "keyword", ""},
+        {"foo", "namespace", ""},
+        {"func", "keyword", ""},
+        {"Add", "function", "definition deprecated"},
+        {"T", "typeParameter", "definition"},
+        {"int", "type", "defaultLibrary"},
+        {"target", "parameter", "definition"},
+        {"T", "typeParameter", ""},
+        {"l", "parameter", "definition"},
+        {"T", "typeParameter", ""},
+        {"T", "typeParameter", ""},
+        {"return", "keyword", ""},
+        {"append", "function", "defaultLibrary"},
+        {"l", "parameter", ""},
+        {"target", "parameter", ""},
+        {"for", "keyword", ""},
+        {"range", "keyword", ""},
+        {"l", "parameter", ""},
+        {"return", "keyword", ""},
+        {"nil", "variable", "readonly defaultLibrary"},
+    }
+    src := `
+-- go.mod --
+module example.com
+
+go 1.19
+-- main.go --
+package foo
+// Deprecated (for testing)
+func Add[T int](target T, l []T) []T {
+    return append(l, target)
+    for range l {} // test coverage
+    return nil
+}
+`
+    WithOptions(
+        Modes(Default),
+        Settings{"semanticTokens": true},
+    ).Run(t, src, func(t *testing.T, env *Env) {
+        env.OpenFile("main.go")
+        //env.Await(EmptyOrNoDiagnostics("main.go`"))
+        env.Await(env.AfterChange(env.DiagnosticAtRegexp("main.go", "for range")))
+        p := &protocol.SemanticTokensParams{
+            TextDocument: protocol.TextDocumentIdentifier{
+                URI: env.Sandbox.Workdir.URI("main.go"),
+            },
+        }
+        v, err := env.Editor.Server.SemanticTokensFull(env.Ctx, p)
+        if err != nil {
+            t.Fatal(err)
+        }
+        seen := interpret(v.Data, env.Editor.BufferText("main.go"))
+        if x := cmp.Diff(want, seen); x != "" {
+            t.Errorf("Semantic tokens do not match (-want +got):\n%s", x)
+        }
+    })
+
+}
+
+type result struct {
+    Token     string
+    TokenType string
+    Mod       string
+}
+
+// human-readable version of the semantic tokens
+// comment, string, number are elided
+// (and in the future, maybe elide other things, like operators)
+func interpret(x []uint32, contents string) []result {
+    lines := strings.Split(contents, "\n")
+    ans := []result{}
+    line, col := 1, 1
+    for i := 0; i < len(x); i += 5 {
+        line += int(x[i])
+        col += int(x[i+1])
+        if x[i] != 0 { // new line
+            col = int(x[i+1]) + 1 // 1-based column numbers
+        }
+        sz := x[i+2]
+        t := semanticTypes[x[i+3]]
+        if t == "comment" || t == "string" || t == "number" {
+            continue
+        }
+        l := x[i+4]
+        var mods []string
+        for i, mod := range semanticModifiers {
+            if l&(1<<i) != 0 {
+                mods = append(mods, mod)
+            }
+        }
+        // col is a utf-8 offset
+        tok := lines[line-1][col-1 : col-1+int(sz)]
+        ans = append(ans, result{tok, t, strings.Join(mods, " ")})
+    }
+    return ans
+}
+
+var (
+    semanticTypes     = lsp.SemanticTypes()
+    semanticModifiers = lsp.SemanticModifiers()
+)
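The interpret helper added above decodes the LSP semantic-token wire format: Data holds five unsigned integers per token (delta line, delta start character, length, token-type index, modifier bit set), with deltas taken relative to the previous token. A self-contained sketch that decodes a hand-written array follows; the legend slices are invented stand-ins for lsp.SemanticTypes() and lsp.SemanticModifiers().

package main

import (
    "fmt"
    "strings"
)

// Invented legends for this sketch; gopls' real legends come from
// lsp.SemanticTypes() and lsp.SemanticModifiers(), and the order matters.
var (
    tokenTypes     = []string{"namespace", "type", "parameter", "variable", "function", "keyword", "typeParameter"}
    tokenModifiers = []string{"declaration", "definition", "readonly"}
)

func main() {
    // Five uint32s per token: deltaLine, deltaStart, length, type index,
    // modifier bitmask. Hand-encoded here for a line like "func Add[T ...".
    data := []uint32{
        0, 0, 4, 5, 0, // "func" at line 1, col 1: keyword
        0, 5, 3, 4, 2, // "Add" at line 1, col 6: function, definition
        0, 4, 1, 6, 2, // "T" at line 1, col 10: typeParameter, definition
    }
    line, col := 1, 1
    for i := 0; i+4 < len(data); i += 5 {
        line += int(data[i])
        if data[i] != 0 {
            col = 1 // column deltas restart at the start of a new line
        }
        col += int(data[i+1])
        var mods []string
        for bit, m := range tokenModifiers {
            if data[i+4]&(1<<bit) != 0 {
                mods = append(mods, m)
            }
        }
        fmt.Printf("line %d col %d len %d %s [%s]\n",
            line, col, data[i+2], tokenTypes[data[i+3]], strings.Join(mods, " "))
    }
}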