internal/lsp: use source.Offset instead of tok.Offset
This isn't strictly necessary for some of the cases, but it's better to use it in all cases. Also added a test to ensure that we avoid (*token.File).Offset in all of gopls; the test was probably overkill, but it was quick to write.

Change-Id: I6dd0126e2211796d5de4e7a389386d7aa81014f0
Reviewed-on: https://go-review.googlesource.com/c/tools/+/353890
Run-TryBot: Rebecca Stambler <rstambler@golang.org>
Trust: Rebecca Stambler <rstambler@golang.org>
gopls-CI: kokoro <noreply+kokoro@google.com>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Robert Findley <rfindley@google.com>
parent 0b930fbaf4
commit 94178a22b2
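
Before the diff itself, a standalone sketch of the pattern being applied may help. (*token.File).Offset panics when it is handed a position outside the file's range, so every call below is funneled through the bounds-checked source.Offset helper added at the bottom of this change. The sketch is illustrative only: safeOffset stands in for that helper, and the parsed file contents are invented.

package main

import (
	"fmt"
	"go/parser"
	"go/token"
)

// safeOffset mirrors the shape of the source.Offset helper introduced in this
// change: reject out-of-range positions with an error instead of panicking.
func safeOffset(tok *token.File, pos token.Pos) (int, error) {
	if !pos.IsValid() || int(pos) < tok.Base() || int(pos) > tok.Base()+tok.Size() {
		return -1, fmt.Errorf("pos %v is not in range for file [%v:%v)", pos, tok.Base(), tok.Base()+tok.Size())
	}
	return tok.Offset(pos), nil
}

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "x.go", "package x\nfunc f() {}\n", 0)
	if err != nil {
		panic(err)
	}
	tok := fset.File(f.Pos())

	// Before: offset := tok.Offset(pos), which panics on a bad pos.
	// After: check the position and propagate an error instead.
	if off, err := safeOffset(tok, f.End()); err != nil {
		fmt.Println("error:", err)
	} else {
		fmt.Println("end offset:", off)
	}

	// An invalid position now produces an error rather than a panic.
	if _, err := safeOffset(tok, token.Pos(tok.Base()+tok.Size()+1)); err != nil {
		fmt.Println("error:", err)
	}
}

The same before/after shape repeats in every hunk below: compute the offset, check the error, and return the function's zero values instead of panicking.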
@@ -782,7 +782,10 @@ func fixMissingCurlies(f *ast.File, b *ast.BlockStmt, parent ast.Node, tok *toke
 	// If the "{" is already in the source code, there isn't anything to
 	// fix since we aren't missing curlies.
 	if b.Lbrace.IsValid() {
-		braceOffset := tok.Offset(b.Lbrace)
+		braceOffset, err := source.Offset(tok, b.Lbrace)
+		if err != nil {
+			return nil
+		}
 		if braceOffset < len(src) && src[braceOffset] == '{' {
 			return nil
 		}
@@ -834,7 +837,11 @@ func fixMissingCurlies(f *ast.File, b *ast.BlockStmt, parent ast.Node, tok *toke
 
 	var buf bytes.Buffer
 	buf.Grow(len(src) + 3)
-	buf.Write(src[:tok.Offset(insertPos)])
+	offset, err := source.Offset(tok, insertPos)
+	if err != nil {
+		return nil
+	}
+	buf.Write(src[:offset])
 
 	// Detect if we need to insert a semicolon to fix "for" loop situations like:
 	//
@@ -854,7 +861,7 @@ func fixMissingCurlies(f *ast.File, b *ast.BlockStmt, parent ast.Node, tok *toke
 	// Insert "{}" at insertPos.
 	buf.WriteByte('{')
 	buf.WriteByte('}')
-	buf.Write(src[tok.Offset(insertPos):])
+	buf.Write(src[offset:])
 	return buf.Bytes()
 }
 
@@ -888,7 +895,10 @@ func fixEmptySwitch(body *ast.BlockStmt, tok *token.File, src []byte) {
 
 	// If the right brace is actually in the source code at the
 	// specified position, don't mess with it.
-	braceOffset := tok.Offset(body.Rbrace)
+	braceOffset, err := source.Offset(tok, body.Rbrace)
+	if err != nil {
+		return
+	}
 	if braceOffset < len(src) && src[braceOffset] == '}' {
 		return
 	}
@@ -923,8 +933,12 @@ func fixDanglingSelector(s *ast.SelectorExpr, tok *token.File, src []byte) []byt
 		return nil
 	}
 
+	insertOffset, err := source.Offset(tok, s.X.End())
+	if err != nil {
+		return nil
+	}
 	// Insert directly after the selector's ".".
-	insertOffset := tok.Offset(s.X.End()) + 1
+	insertOffset++
 	if src[insertOffset-1] != '.' {
 		return nil
 	}
@@ -980,7 +994,10 @@ func isPhantomUnderscore(id *ast.Ident, tok *token.File, src []byte) bool {
 
 	// Phantom underscore means the underscore is not actually in the
 	// program text.
-	offset := tok.Offset(id.Pos())
+	offset, err := source.Offset(tok, id.Pos())
+	if err != nil {
+		return false
+	}
 	return len(src) <= offset || src[offset] != '_'
 }
 
@@ -995,11 +1012,15 @@ func fixInitStmt(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte)
 	}
 
 	// Try to extract a statement from the BadExpr.
-	// Make sure that the positions are in range first.
-	if !source.InRange(tok, bad.Pos()) || !source.InRange(tok, bad.End()-1) {
+	start, err := source.Offset(tok, bad.Pos())
+	if err != nil {
 		return
 	}
-	stmtBytes := src[tok.Offset(bad.Pos()) : tok.Offset(bad.End()-1)+1]
+	end, err := source.Offset(tok, bad.End()-1)
+	if err != nil {
+		return
+	}
+	stmtBytes := src[start : end+1]
 	stmt, err := parseStmt(bad.Pos(), stmtBytes)
 	if err != nil {
 		return
@@ -1039,7 +1060,11 @@ func fixInitStmt(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte)
 // readKeyword reads the keyword starting at pos, if any.
 func readKeyword(pos token.Pos, tok *token.File, src []byte) string {
 	var kwBytes []byte
-	for i := tok.Offset(pos); i < len(src); i++ {
+	offset, err := source.Offset(tok, pos)
+	if err != nil {
+		return ""
+	}
+	for i := offset; i < len(src); i++ {
 		// Use a simplified identifier check since keywords are always lowercase ASCII.
 		if src[i] < 'a' || src[i] > 'z' {
 			break
@@ -1076,15 +1101,15 @@ func fixArrayType(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte
 	// Avoid doing tok.Offset(to) since that panics if badExpr ends at EOF.
 	// It also panics if the position is not in the range of the file, and
 	// badExprs may not necessarily have good positions, so check first.
-	if !source.InRange(tok, from) {
+	fromOffset, err := source.Offset(tok, from)
+	if err != nil {
 		return false
 	}
-	if !source.InRange(tok, to-1) {
+	toOffset, err := source.Offset(tok, to-1)
+	if err != nil {
 		return false
 	}
-	fromOffset := tok.Offset(from)
-	toOffset := tok.Offset(to-1) + 1
-	exprBytes = append(exprBytes, src[fromOffset:toOffset]...)
+	exprBytes = append(exprBytes, src[fromOffset:toOffset+1]...)
 	exprBytes = bytes.TrimSpace(exprBytes)
 
 	// If our expression ends in "]" (e.g. "[]"), add a phantom selector
@@ -1237,18 +1262,26 @@ FindTo:
 		}
 	}
 
-	if !from.IsValid() || tok.Offset(from) >= len(src) {
+	fromOffset, err := source.Offset(tok, from)
+	if err != nil {
+		return false
+	}
+	if !from.IsValid() || fromOffset >= len(src) {
 		return false
 	}
 
-	if !to.IsValid() || tok.Offset(to) >= len(src) {
+	toOffset, err := source.Offset(tok, to)
+	if err != nil {
+		return false
+	}
+	if !to.IsValid() || toOffset >= len(src) {
 		return false
 	}
 
 	// Insert any phantom selectors needed to prevent dangling "." from messing
 	// up the AST.
 	exprBytes := make([]byte, 0, int(to-from)+len(phantomSelectors))
-	for i, b := range src[tok.Offset(from):tok.Offset(to)] {
+	for i, b := range src[fromOffset:toOffset] {
 		if len(phantomSelectors) > 0 && from+token.Pos(i) == phantomSelectors[0] {
 			exprBytes = append(exprBytes, '_')
 			phantomSelectors = phantomSelectors[1:]
@@ -268,7 +268,10 @@ func locInRange(f *token.File, loc token.Pos) bool {
 func (e *encoded) srcLine(x ast.Node) string {
 	file := e.pgf.Tok
 	line := file.Line(x.Pos())
-	start := file.Offset(file.LineStart(line))
+	start, err := source.Offset(file, file.LineStart(line))
+	if err != nil {
+		return ""
+	}
 	end := start
 	for ; end < len(e.pgf.Src) && e.pgf.Src[end] != '\n'; end++ {
 
@@ -80,12 +80,15 @@ func packageCompletionSurrounding(ctx context.Context, fset *token.FileSet, pgf
 		return nil, fmt.Errorf("unparseable file (%s)", pgf.URI)
 	}
 	tok := fset.File(expr.Pos())
-	offset := pgf.Tok.Offset(pos)
+	offset, err := source.Offset(pgf.Tok, pos)
+	if err != nil {
+		return nil, err
+	}
 	if offset > tok.Size() {
 		debug.Bug(ctx, "out of bounds cursor", "cursor offset (%d) out of bounds for %s (size: %d)", offset, pgf.URI, tok.Size())
 		return nil, fmt.Errorf("cursor out of bounds")
 	}
-	cursor := tok.Pos(pgf.Tok.Offset(pos))
+	cursor := tok.Pos(offset)
 	m := &protocol.ColumnMapper{
 		URI:     pgf.URI,
 		Content: pgf.Src,
@@ -63,7 +63,11 @@ func extractVariable(fset *token.FileSet, rng span.Range, src []byte, file *ast.
 	if tok == nil {
 		return nil, fmt.Errorf("no file for pos %v", fset.Position(file.Pos()))
 	}
-	newLineIndent := "\n" + calculateIndentation(src, tok, insertBeforeStmt)
+	indent, err := calculateIndentation(src, tok, insertBeforeStmt)
+	if err != nil {
+		return nil, err
+	}
+	newLineIndent := "\n" + indent
 
 	lhs := strings.Join(lhsNames, ", ")
 	assignStmt := &ast.AssignStmt{
@@ -128,11 +132,17 @@ func CanExtractVariable(rng span.Range, file *ast.File) (ast.Expr, []ast.Node, b
 // When inserting lines of code, we must ensure that the lines have consistent
 // formatting (i.e. the proper indentation). To do so, we observe the indentation on the
 // line of code on which the insertion occurs.
-func calculateIndentation(content []byte, tok *token.File, insertBeforeStmt ast.Node) string {
+func calculateIndentation(content []byte, tok *token.File, insertBeforeStmt ast.Node) (string, error) {
 	line := tok.Line(insertBeforeStmt.Pos())
-	lineOffset := tok.Offset(tok.LineStart(line))
-	stmtOffset := tok.Offset(insertBeforeStmt.Pos())
-	return string(content[lineOffset:stmtOffset])
+	lineOffset, err := Offset(tok, tok.LineStart(line))
+	if err != nil {
+		return "", err
+	}
+	stmtOffset, err := Offset(tok, insertBeforeStmt.Pos())
+	if err != nil {
+		return "", err
+	}
+	return string(content[lineOffset:stmtOffset]), nil
 }
 
 // generateAvailableIdentifier adjusts the new function name until there are no collisons in scope.
@@ -390,8 +400,14 @@ func extractFunctionMethod(fset *token.FileSet, rng span.Range, src []byte, file
 
 	// We put the selection in a constructed file. We can then traverse and edit
 	// the extracted selection without modifying the original AST.
-	startOffset := tok.Offset(rng.Start)
-	endOffset := tok.Offset(rng.End)
+	startOffset, err := Offset(tok, rng.Start)
+	if err != nil {
+		return nil, err
+	}
+	endOffset, err := Offset(tok, rng.End)
+	if err != nil {
+		return nil, err
+	}
 	selection := src[startOffset:endOffset]
 	extractedBlock, err := parseBlockStmt(fset, selection)
 	if err != nil {
@@ -584,11 +600,21 @@ func extractFunctionMethod(fset *token.FileSet, rng span.Range, src []byte, file
 
 	// We're going to replace the whole enclosing function,
 	// so preserve the text before and after the selected block.
-	outerStart := tok.Offset(outer.Pos())
-	outerEnd := tok.Offset(outer.End())
+	outerStart, err := Offset(tok, outer.Pos())
+	if err != nil {
+		return nil, err
+	}
+	outerEnd, err := Offset(tok, outer.End())
+	if err != nil {
+		return nil, err
+	}
 	before := src[outerStart:startOffset]
 	after := src[endOffset:outerEnd]
-	newLineIndent := "\n" + calculateIndentation(src, tok, start)
+	indent, err := calculateIndentation(src, tok, start)
+	if err != nil {
+		return nil, err
+	}
+	newLineIndent := "\n" + indent
 
 	var fullReplacement strings.Builder
 	fullReplacement.Write(before)
@@ -634,8 +660,11 @@ func extractFunctionMethod(fset *token.FileSet, rng span.Range, src []byte, file
 // their cursors for whitespace. To support this use case, we must manually adjust the
 // ranges to match the correct AST node. In this particular example, we would adjust
 // rng.Start forward by one byte, and rng.End backwards by two bytes.
-func adjustRangeForWhitespace(rng span.Range, tok *token.File, content []byte) span.Range {
-	offset := tok.Offset(rng.Start)
+func adjustRangeForWhitespace(rng span.Range, tok *token.File, content []byte) (span.Range, error) {
+	offset, err := Offset(tok, rng.Start)
+	if err != nil {
+		return span.Range{}, err
+	}
 	for offset < len(content) {
 		if !unicode.IsSpace(rune(content[offset])) {
 			break
@@ -646,7 +675,10 @@ func adjustRangeForWhitespace(rng span.Range, tok *token.File, content []byte) s
 	rng.Start = tok.Pos(offset)
 
 	// Move backwards to find a non-whitespace character.
-	offset = tok.Offset(rng.End)
+	offset, err = Offset(tok, rng.End)
+	if err != nil {
+		return span.Range{}, err
+	}
 	for o := offset - 1; 0 <= o && o < len(content); o-- {
 		if !unicode.IsSpace(rune(content[o])) {
 			break
@@ -654,7 +686,7 @@ func adjustRangeForWhitespace(rng span.Range, tok *token.File, content []byte) s
 		offset = o
 	}
 	rng.End = tok.Pos(offset)
-	return rng
+	return rng, nil
 }
 
 // findParent finds the parent AST node of the given target node, if the target is a
@@ -916,7 +948,11 @@ func CanExtractFunction(fset *token.FileSet, rng span.Range, src []byte, file *a
 	if tok == nil {
 		return nil, false, false, fmt.Errorf("no file for pos %v", fset.Position(file.Pos()))
 	}
-	rng = adjustRangeForWhitespace(rng, tok, src)
+	var err error
+	rng, err = adjustRangeForWhitespace(rng, tok, src)
+	if err != nil {
+		return nil, false, false, err
+	}
 	path, _ := astutil.PathEnclosingInterval(file, rng.Start, rng.End)
 	if len(path) == 0 {
 		return nil, false, false, fmt.Errorf("no path enclosing interval")
@@ -153,7 +153,10 @@ func ComputeOneImportFixEdits(snapshot Snapshot, pgf *ParsedGoFile, fix *imports
 
 func computeFixEdits(snapshot Snapshot, pgf *ParsedGoFile, options *imports.Options, fixes []*imports.ImportFix) ([]protocol.TextEdit, error) {
 	// trim the original data to match fixedData
-	left := importPrefix(pgf.Src)
+	left, err := importPrefix(pgf.Src)
+	if err != nil {
+		return nil, err
+	}
 	extra := !strings.Contains(left, "\n") // one line may have more than imports
 	if extra {
 		left = string(pgf.Src)
@@ -185,25 +188,30 @@ func computeFixEdits(snapshot Snapshot, pgf *ParsedGoFile, options *imports.Opti
 // importPrefix returns the prefix of the given file content through the final
 // import statement. If there are no imports, the prefix is the package
 // statement and any comment groups below it.
-func importPrefix(src []byte) string {
+func importPrefix(src []byte) (string, error) {
 	fset := token.NewFileSet()
 	// do as little parsing as possible
 	f, err := parser.ParseFile(fset, "", src, parser.ImportsOnly|parser.ParseComments)
 	if err != nil { // This can happen if 'package' is misspelled
-		return ""
+		return "", fmt.Errorf("importPrefix: failed to parse: %s", err)
 	}
 	tok := fset.File(f.Pos())
 	var importEnd int
 	for _, d := range f.Decls {
 		if x, ok := d.(*ast.GenDecl); ok && x.Tok == token.IMPORT {
-			if e := tok.Offset(d.End()); e > importEnd {
+			if e, err := Offset(tok, d.End()); err != nil {
+				return "", fmt.Errorf("importPrefix: %s", err)
+			} else if e > importEnd {
 				importEnd = e
 			}
 		}
 	}
 
 	maybeAdjustToLineEnd := func(pos token.Pos, isCommentNode bool) int {
-		offset := tok.Offset(pos)
+		offset, err := Offset(tok, pos)
+		if err != nil {
+			return -1
+		}
 
 		// Don't go past the end of the file.
 		if offset > len(src) {
@@ -215,7 +223,10 @@ func importPrefix(src []byte) string {
 		// return a position on the next line whenever possible.
 		switch line := tok.Line(tok.Pos(offset)); {
 		case line < tok.LineCount():
-			nextLineOffset := tok.Offset(tok.LineStart(line + 1))
+			nextLineOffset, err := Offset(tok, tok.LineStart(line+1))
+			if err != nil {
+				return -1
+			}
 			// If we found a position that is at the end of a line, move the
 			// offset to the start of the next line.
 			if offset+1 == nextLineOffset {
@@ -234,14 +245,19 @@ func importPrefix(src []byte) string {
 	}
 	for _, cgroup := range f.Comments {
 		for _, c := range cgroup.List {
-			if end := tok.Offset(c.End()); end > importEnd {
+			if end, err := Offset(tok, c.End()); err != nil {
+				return "", err
+			} else if end > importEnd {
 				startLine := tok.Position(c.Pos()).Line
 				endLine := tok.Position(c.End()).Line
 
 				// Work around golang/go#41197 by checking if the comment might
 				// contain "\r", and if so, find the actual end position of the
 				// comment by scanning the content of the file.
-				startOffset := tok.Offset(c.Pos())
+				startOffset, err := Offset(tok, c.Pos())
+				if err != nil {
+					return "", err
+				}
 				if startLine != endLine && bytes.Contains(src[startOffset:], []byte("\r")) {
 					if commentEnd := scanForCommentEnd(src[startOffset:]); commentEnd > 0 {
 						end = startOffset + commentEnd
@@ -254,7 +270,7 @@ func importPrefix(src []byte) string {
 	if importEnd > len(src) {
 		importEnd = len(src)
 	}
-	return string(src[:importEnd])
+	return string(src[:importEnd]), nil
 }
 
 // scanForCommentEnd returns the offset of the end of the multi-line comment
@@ -35,7 +35,10 @@ func TestImportPrefix(t *testing.T) {
 		{"package x; import \"os\"; func f() {}\n\n", "package x; import \"os\""},
 		{"package x; func f() {fmt.Println()}\n\n", "package x"},
 	} {
-		got := importPrefix([]byte(tt.input))
+		got, err := importPrefix([]byte(tt.input))
+		if err != nil {
+			t.Fatal(err)
+		}
 		if got != tt.want {
 			t.Errorf("%d: failed for %q:\n%s", i, tt.input, diffStr(t, tt.want, got))
 		}
@@ -62,7 +65,10 @@ Hi description
 */`,
 		},
 	} {
-		got := importPrefix([]byte(strings.ReplaceAll(tt.input, "\n", "\r\n")))
+		got, err := importPrefix([]byte(strings.ReplaceAll(tt.input, "\n", "\r\n")))
+		if err != nil {
+			t.Fatal(err)
+		}
 		want := strings.ReplaceAll(tt.want, "\n", "\r\n")
 		if got != want {
 			t.Errorf("%d: failed for %q:\n%s", i, tt.input, diffStr(t, want, got))
@@ -203,8 +203,14 @@ func findRune(ctx context.Context, snapshot Snapshot, fh FileHandle, position pr
 		// It's a string, scan only if it contains a unicode escape sequence under or before the
 		// current cursor position.
 		var found bool
-		litOffset := pgf.Tok.Offset(lit.Pos())
-		offset := pgf.Tok.Offset(pos)
+		litOffset, err := Offset(pgf.Tok, lit.Pos())
+		if err != nil {
+			return 0, MappedRange{}, err
+		}
+		offset, err := Offset(pgf.Tok, pos)
+		if err != nil {
+			return 0, MappedRange{}, err
+		}
 		for i := offset - litOffset; i > 0; i-- {
 			// Start at the cursor position and search backward for the beginning of a rune escape sequence.
 			rr, _ := utf8.DecodeRuneInString(lit.Value[i:])
@@ -486,17 +492,27 @@ func HoverInfo(ctx context.Context, s Snapshot, pkg Package, obj types.Object, p
 		// obj may not have been produced by type checking the AST containing
 		// node, so we need to be careful about using token.Pos.
 		tok := s.FileSet().File(obj.Pos())
-		offset := tok.Offset(obj.Pos())
+		offset, err := Offset(tok, obj.Pos())
+		if err != nil {
+			return nil, err
+		}
 		tok2 := s.FileSet().File(node.Pos())
 		var spec ast.Spec
 		for _, s := range node.Specs {
 			// Avoid panics by guarding the calls to token.Offset (golang/go#48249).
-			if InRange(tok2, s.Pos()) && InRange(tok2, s.End()) && tok2.Offset(s.Pos()) <= offset && offset <= tok2.Offset(s.End()) {
+			start, err := Offset(tok2, s.Pos())
+			if err != nil {
+				return nil, err
+			}
+			end, err := Offset(tok2, s.End())
+			if err != nil {
+				return nil, err
+			}
+			if start <= offset && offset <= end {
 				spec = s
 				break
 			}
 		}
-		var err error
 		info, err = formatGenDecl(node, spec, obj, obj.Type())
 		if err != nil {
 			return nil, err
@@ -331,7 +331,10 @@ func fullNode(snapshot Snapshot, obj types.Object, pkg Package) (ast.Decl, error
 	fset := snapshot.FileSet()
 	file2, _ := parser.ParseFile(fset, tok.Name(), pgf.Src, parser.AllErrors|parser.ParseComments)
 	if file2 != nil {
-		offset := tok.Offset(obj.Pos())
+		offset, err := Offset(tok, obj.Pos())
+		if err != nil {
+			return nil, err
+		}
 		file = file2
 		tok2 := fset.File(file2.Pos())
 		pos = tok2.Pos(offset)
@@ -223,7 +223,6 @@ func qualifiedObjsAtProtocolPos(ctx context.Context, s Snapshot, uri span.URI, p
 		return nil, errNoObjectFound
 	}
 	pkg := pkgs[0]
-	var offset int
 	pgf, err := pkg.File(uri)
 	if err != nil {
 		return nil, err
@@ -236,7 +235,10 @@ func qualifiedObjsAtProtocolPos(ctx context.Context, s Snapshot, uri span.URI, p
 	if err != nil {
 		return nil, err
 	}
-	offset = pgf.Tok.Offset(rng.Start)
+	offset, err := Offset(pgf.Tok, rng.Start)
+	if err != nil {
+		return nil, err
+	}
 	return qualifiedObjsAtLocation(ctx, s, objSearchKey{uri, offset}, map[objSearchKey]bool{})
 }
 
@@ -350,7 +352,11 @@ func qualifiedObjsAtLocation(ctx context.Context, s Snapshot, key objSearchKey,
 	offset := -1
 	for _, pgf := range pkg.CompiledGoFiles() {
 		if pgf.Tok.Base() <= int(pos) && int(pos) <= pgf.Tok.Base()+pgf.Tok.Size() {
-			offset = pgf.Tok.Offset(pos)
+			var err error
+			offset, err = Offset(pgf.Tok, pos)
+			if err != nil {
+				return nil, err
+			}
 			uri = pgf.URI
 		}
 	}
@@ -0,0 +1,71 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source_test
+
+import (
+	"go/token"
+	"go/types"
+	"testing"
+
+	"golang.org/x/tools/go/packages"
+)
+
+// This test reports any unexpected uses of (*go/token.File).Offset within
+// the gopls codebase to ensure that we don't check in more code that is prone
+// to panicking. All calls to (*go/token.File).Offset should be replaced with
+// calls to source.Offset.
+func TestTokenOffset(t *testing.T) {
+	fset := token.NewFileSet()
+	pkgs, err := packages.Load(&packages.Config{
+		Fset: fset,
+		Mode: packages.NeedName | packages.NeedModule | packages.NeedCompiledGoFiles | packages.NeedTypes | packages.NeedTypesInfo | packages.NeedSyntax | packages.NeedImports | packages.NeedDeps,
+	}, "go/token", "golang.org/x/tools/internal/lsp/...", "golang.org/x/tools/gopls/...")
+	if err != nil {
+		t.Fatal(err)
+	}
+	var tokPkg *packages.Package
+	for _, pkg := range pkgs {
+		if pkg.PkgPath == "go/token" {
+			tokPkg = pkg
+			break
+		}
+	}
+	typname, ok := tokPkg.Types.Scope().Lookup("File").(*types.TypeName)
+	if !ok {
+		t.Fatal("expected go/token.File typename, got none")
+	}
+	named, ok := typname.Type().(*types.Named)
+	if !ok {
+		t.Fatalf("expected named type, got %T", typname.Type)
+	}
+	var offset *types.Func
+	for i := 0; i < named.NumMethods(); i++ {
+		meth := named.Method(i)
+		if meth.Name() == "Offset" {
+			offset = meth
+			break
+		}
+	}
+	for _, pkg := range pkgs {
+		for ident, obj := range pkg.TypesInfo.Uses {
+			if ident.Name != "Offset" {
+				continue
+			}
+			if pkg.PkgPath == "go/token" {
+				continue
+			}
+			if !types.Identical(offset.Type(), obj.Type()) {
+				continue
+			}
+			// The only permitted use is in golang.org/x/tools/internal/lsp/source.Offset,
+			// so check the enclosing function.
+			sourceOffset := pkg.Types.Scope().Lookup("Offset").(*types.Func)
+			if sourceOffset.Pos() <= ident.Pos() && ident.Pos() <= sourceOffset.Scope().End() {
+				continue // accepted usage
+			}
+			t.Errorf(`%s: Unexpected use of (*go/token.File).Offset. Please use golang.org/x/tools/internal/lsp/source.Offset instead.`, fset.Position(ident.Pos()))
+		}
+	}
+}
@@ -6,6 +6,7 @@ package source
 
 import (
 	"context"
+	"fmt"
 	"go/ast"
 	"go/printer"
 	"go/token"
@@ -543,6 +544,15 @@ func IsCommandLineArguments(s string) bool {
 	return strings.Contains(s, "command-line-arguments")
 }
 
+// Offset returns tok.Offset(pos), but it also checks that the pos is in range
+// for the given file.
+func Offset(tok *token.File, pos token.Pos) (int, error) {
+	if !InRange(tok, pos) {
+		return -1, fmt.Errorf("pos %v is not in range for file [%v:%v)", pos, tok.Base(), tok.Base()+tok.Size())
+	}
+	return tok.Offset(pos), nil
+}
+
 // InRange reports whether the given position is in the given token.File.
 func InRange(tok *token.File, pos token.Pos) bool {
 	size := tok.Pos(tok.Size())