internal/lsp/cache: trim more stuff in ParseExported mode

Despite the name, ParseExported only hollowed out declarations -- it
didn't actually drop any from the AST. This leaves a fair amount of
unexported crud behind. Unfortunately, there are a *lot* of ways to
expose an unexported declaration from an exported one, and it can be
done across files. Because of that, discarding unexported declarations
requires a lot of work.

This CL implements a decent attempt at pruning as much as possible from
the AST in ParseExported mode.

First, we analyze the AST of all the files in the package for exported
uses of unexported identifiers, iterating to a fixed point. Then, we
type check those ASTs. If there are missing identifiers (probably due to
a bug in the dependency analysis) we use those errors to re-parse. After
that we give up and fall back to the older, less effective trimming. The
pkg type changes slightly to accommodate the new control flow.

We have to analyze all the files at once because an unexported type
might be exposed in another file. Unfortunately, that means we can't
parse a single file at a time any more -- the result of parsing a file
depends on the result of parsing its siblings. To avoid cache
corruption, we have to do the parsing directly in type checking,
uncached.

This, in turn, required changes to the PosTo* functions. Previously,
they operated just on files, but a file name is no longer sufficient to
get a ParseExported AST. Change them to work on Packages instead. I
squeezed in a bit of refactoring while I was touching them.

Change-Id: I61249144ffa43ad645ed38d79e873e3998b0f38d
Reviewed-on: https://go-review.googlesource.com/c/tools/+/312471
Trust: Heschi Kreinick <heschi@google.com>
Run-TryBot: Heschi Kreinick <heschi@google.com>
gopls-CI: kokoro <noreply+kokoro@google.com>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Robert Findley <rfindley@google.com>
Reviewed-by: Rebecca Stambler <rstambler@golang.org>
This commit is contained in:
Heschi Kreinick 2021-04-12 18:44:46 -04:00
parent 2db0265cb2
commit cd1d0887dc
13 changed files with 739 additions and 306 deletions

View File

@ -403,7 +403,7 @@ func (s *snapshot) DiagnosePackage(ctx context.Context, spkg source.Package) (ma
analyzers = append(analyzers, a)
}
var errorAnalyzerDiag []*source.Diagnostic
if pkg.hasTypeErrors {
if pkg.HasTypeErrors() {
var err error
errorAnalyzerDiag, err = s.Analyze(ctx, pkg.ID(), analyzers)
if err != nil {

View File

@ -9,7 +9,6 @@ import (
"context"
"fmt"
"go/ast"
"go/scanner"
"go/types"
"path"
"path/filepath"
@ -303,26 +302,34 @@ func (s *snapshot) parseGoHandles(ctx context.Context, files []span.URI, mode so
}
func typeCheck(ctx context.Context, snapshot *snapshot, m *metadata, mode source.ParseMode, deps map[packagePath]*packageHandle) (*pkg, error) {
ctx, done := event.Start(ctx, "cache.importer.typeCheck", tag.Package.Of(string(m.id)))
defer done()
var pkg *pkg
var err error
fset := snapshot.view.session.cache.fset
pkg := &pkg{
m: m,
mode: mode,
goFiles: make([]*source.ParsedGoFile, len(m.goFiles)),
compiledGoFiles: make([]*source.ParsedGoFile, len(m.compiledGoFiles)),
imports: make(map[packagePath]*pkg),
typesSizes: m.typesSizes,
typesInfo: &types.Info{
Types: make(map[ast.Expr]types.TypeAndValue),
Defs: make(map[*ast.Ident]types.Object),
Uses: make(map[*ast.Ident]types.Object),
Implicits: make(map[ast.Node]types.Object),
Selections: make(map[*ast.SelectorExpr]*types.Selection),
Scopes: make(map[ast.Node]*types.Scope),
},
var filter *unexportedFilter
if mode == source.ParseExported {
filter = &unexportedFilter{uses: map[string]bool{}}
}
pkg, err = doTypeCheck(ctx, snapshot, m, mode, deps, filter)
if err != nil {
return nil, err
}
if mode == source.ParseExported {
// The AST filtering is a little buggy and may remove things it
// shouldn't. If we only got undeclared name errors, try one more
// time keeping those names.
missing, unexpected := filter.ProcessErrors(pkg.typeErrors)
if len(unexpected) == 0 && len(missing) != 0 {
event.Log(ctx, fmt.Sprintf("discovered missing identifiers: %v", missing), tag.Package.Of(string(m.id)))
pkg, err = doTypeCheck(ctx, snapshot, m, mode, deps, filter)
missing, unexpected = filter.ProcessErrors(pkg.typeErrors)
}
if len(unexpected) != 0 || len(missing) != 0 {
event.Log(ctx, fmt.Sprintf("falling back to safe trimming due to type errors: %v or still-missing identifiers: %v", unexpected, missing), tag.Package.Of(string(m.id)))
pkg, err = doTypeCheck(ctx, snapshot, m, mode, deps, nil)
}
}
// If this is a replaced module in the workspace, the version is
// meaningless, and we don't want clients to access it.
if m.module != nil {
@ -335,157 +342,19 @@ func typeCheck(ctx context.Context, snapshot *snapshot, m *metadata, mode source
Version: version,
}
}
var (
files = make([]*ast.File, len(m.compiledGoFiles))
parseErrors = make([]scanner.ErrorList, len(m.compiledGoFiles))
actualErrors = make([]error, len(m.compiledGoFiles))
wg sync.WaitGroup
mu sync.Mutex
haveFixedFiles bool
)
for i, cgf := range m.compiledGoFiles {
wg.Add(1)
go func(i int, cgf span.URI) {
defer wg.Done()
fh, err := snapshot.GetFile(ctx, cgf)
if err != nil {
actualErrors[i] = err
return
}
pgh := snapshot.parseGoHandle(ctx, fh, mode)
pgf, fixed, err := snapshot.parseGo(ctx, pgh)
if err != nil {
actualErrors[i] = err
return
}
pkg.compiledGoFiles[i] = pgf
files[i], parseErrors[i], actualErrors[i] = pgf.File, pgf.ParseErr, err
// If we have fixed parse errors in any of the files, we should hide type
// errors, as they may be completely nonsensical.
mu.Lock()
haveFixedFiles = haveFixedFiles || fixed
mu.Unlock()
}(i, cgf)
}
for i, gf := range m.goFiles {
wg.Add(1)
// We need to parse the non-compiled go files, but we don't care about their errors.
go func(i int, gf span.URI) {
defer wg.Done()
fh, err := snapshot.GetFile(ctx, gf)
if err != nil {
return
}
pgf, _ := snapshot.ParseGo(ctx, fh, mode)
pkg.goFiles[i] = pgf
}(i, gf)
}
wg.Wait()
for _, err := range actualErrors {
if err != nil {
return nil, err
}
}
var i int
for _, e := range parseErrors {
if e != nil {
parseErrors[i] = e
i++
}
}
parseErrors = parseErrors[:i]
i = 0
for _, f := range files {
if f != nil {
files[i] = f
i++
}
}
files = files[:i]
// Use the default type information for the unsafe package.
if pkg.m.pkgPath == "unsafe" {
pkg.types = types.Unsafe
// Don't type check Unsafe: it's unnecessary, and doing so exposes a data
// race to Unsafe.completed.
return pkg, nil
} else if len(files) == 0 { // not the unsafe package, no parsed files
// Try to attach error messages to the file as much as possible.
var found bool
for _, e := range m.errors {
srcDiags, err := goPackagesErrorDiagnostics(snapshot, pkg, e)
if err != nil {
continue
}
found = true
pkg.diagnostics = append(pkg.diagnostics, srcDiags...)
}
if found {
return pkg, nil
}
return nil, errors.Errorf("no parsed files for package %s, expected: %v, errors: %v", pkg.m.pkgPath, pkg.compiledGoFiles, m.errors)
} else {
pkg.types = types.NewPackage(string(m.pkgPath), string(m.name))
}
var typeErrors []types.Error
cfg := &types.Config{
Error: func(e error) {
typeErrors = append(typeErrors, e.(types.Error))
},
Importer: importerFunc(func(pkgPath string) (*types.Package, error) {
// If the context was cancelled, we should abort.
if ctx.Err() != nil {
return nil, ctx.Err()
}
dep := resolveImportPath(pkgPath, pkg, deps)
if dep == nil {
return nil, snapshot.missingPkgError(pkgPath)
}
if !isValidImport(m.pkgPath, dep.m.pkgPath) {
return nil, errors.Errorf("invalid use of internal package %s", pkgPath)
}
depPkg, err := dep.check(ctx, snapshot)
if err != nil {
return nil, err
}
pkg.imports[depPkg.m.pkgPath] = depPkg
return depPkg.types, nil
}),
}
// We want to type check cgo code if go/types supports it.
// We passed typecheckCgo to go/packages when we Loaded.
typesinternal.SetUsesCgo(cfg)
check := types.NewChecker(cfg, fset, pkg.types, pkg.typesInfo)
// Type checking errors are handled via the config, so ignore them here.
_ = check.Files(files)
// If the context was cancelled, we may have returned a ton of transient
// errors to the type checker. Swallow them.
if ctx.Err() != nil {
return nil, ctx.Err()
}
// We don't care about a package's errors unless we have parsed it in full.
if mode != source.ParseFull {
return pkg, nil
}
if len(m.errors) != 0 {
pkg.hasListOrParseErrors = true
for _, e := range m.errors {
diags, err := goPackagesErrorDiagnostics(snapshot, pkg, e)
if err != nil {
event.Error(ctx, "unable to compute positions for list errors", err, tag.Package.Of(pkg.ID()))
continue
}
pkg.diagnostics = append(pkg.diagnostics, diags...)
for _, e := range m.errors {
diags, err := goPackagesErrorDiagnostics(snapshot, pkg, e)
if err != nil {
event.Error(ctx, "unable to compute positions for list errors", err, tag.Package.Of(pkg.ID()))
continue
}
pkg.diagnostics = append(pkg.diagnostics, diags...)
}
// Our heuristic for whether to show type checking errors is:
@ -501,27 +370,25 @@ func typeCheck(ctx context.Context, snapshot *snapshot, m *metadata, mode source
// Track URIs with parse errors so that we can suppress type errors for these
// files.
unparseable := map[span.URI]bool{}
if len(parseErrors) != 0 {
pkg.hasListOrParseErrors = true
for _, e := range parseErrors {
diags, err := parseErrorDiagnostics(snapshot, pkg, e)
if err != nil {
event.Error(ctx, "unable to compute positions for parse errors", err, tag.Package.Of(pkg.ID()))
continue
}
for _, diag := range diags {
unparseable[diag.URI] = true
pkg.diagnostics = append(pkg.diagnostics, diag)
}
for _, e := range pkg.parseErrors {
diags, err := parseErrorDiagnostics(snapshot, pkg, e)
if err != nil {
event.Error(ctx, "unable to compute positions for parse errors", err, tag.Package.Of(pkg.ID()))
continue
}
for _, diag := range diags {
unparseable[diag.URI] = true
pkg.diagnostics = append(pkg.diagnostics, diag)
}
}
if haveFixedFiles {
if pkg.hasFixedFiles {
return pkg, nil
}
for _, e := range expandErrors(typeErrors, snapshot.View().Options().RelatedInformationSupported) {
pkg.hasTypeErrors = true
unexpanded := pkg.typeErrors
pkg.typeErrors = nil
for _, e := range expandErrors(unexpanded, snapshot.View().Options().RelatedInformationSupported) {
diags, err := typeErrorDiagnostics(snapshot, pkg, e)
if err != nil {
event.Error(ctx, "unable to compute positions for type errors", err, tag.Package.Of(pkg.ID()))
@ -547,6 +414,172 @@ func typeCheck(ctx context.Context, snapshot *snapshot, m *metadata, mode source
return pkg, nil
}
// doTypeCheck parses the package's files and runs the type checker over
// them, returning a freshly built pkg. When astFilter is non-nil
// (ParseExported mode), the compiled files are pruned down to exported
// declarations before type checking.
func doTypeCheck(ctx context.Context, snapshot *snapshot, m *metadata, mode source.ParseMode, deps map[packagePath]*packageHandle, astFilter *unexportedFilter) (*pkg, error) {
	ctx, done := event.Start(ctx, "cache.typeCheck", tag.Package.Of(string(m.id)))
	defer done()
	pkg := &pkg{
		m:       m,
		mode:    mode,
		imports: make(map[packagePath]*pkg),
		types:   types.NewPackage(string(m.pkgPath), string(m.name)),
		typesInfo: &types.Info{
			Types:      make(map[ast.Expr]types.TypeAndValue),
			Defs:       make(map[*ast.Ident]types.Object),
			Uses:       make(map[*ast.Ident]types.Object),
			Implicits:  make(map[ast.Node]types.Object),
			Selections: make(map[*ast.SelectorExpr]*types.Selection),
			Scopes:     make(map[ast.Node]*types.Scope),
		},
		typesSizes: m.typesSizes,
	}
	for _, gf := range pkg.m.goFiles {
		// In the presence of line directives, we may need to report errors in
		// non-compiled Go files, so we need to register them on the package.
		// However, we only need to really parse them in ParseFull mode, when
		// the user might actually be looking at the file.
		fh, err := snapshot.GetFile(ctx, gf)
		if err != nil {
			return nil, err
		}
		goMode := source.ParseFull
		if mode != source.ParseFull {
			goMode = source.ParseHeader
		}
		pgf, err := snapshot.ParseGo(ctx, fh, goMode)
		if err != nil {
			return nil, err
		}
		pkg.goFiles = append(pkg.goFiles, pgf)
	}
	// Parse (and in ParseExported mode, prune) the compiled Go files.
	if err := parseCompiledGoFiles(ctx, snapshot, mode, pkg, astFilter); err != nil {
		return nil, err
	}
	// Use the default type information for the unsafe package.
	if m.pkgPath == "unsafe" {
		// Don't type check Unsafe: it's unnecessary, and doing so exposes a data
		// race to Unsafe.completed.
		pkg.types = types.Unsafe
		return pkg, nil
	}
	if len(m.compiledGoFiles) == 0 {
		// No files most likely means go/packages failed. Try to attach error
		// messages to the file as much as possible.
		var found bool
		for _, e := range m.errors {
			srcDiags, err := goPackagesErrorDiagnostics(snapshot, pkg, e)
			if err != nil {
				// This error couldn't be converted to a diagnostic; try the rest.
				continue
			}
			found = true
			pkg.diagnostics = append(pkg.diagnostics, srcDiags...)
		}
		if found {
			return pkg, nil
		}
		return nil, errors.Errorf("no parsed files for package %s, expected: %v, errors: %v", pkg.m.pkgPath, pkg.compiledGoFiles, m.errors)
	}
	cfg := &types.Config{
		// Collect type errors on the pkg rather than failing the check.
		Error: func(e error) {
			pkg.typeErrors = append(pkg.typeErrors, e.(types.Error))
		},
		Importer: importerFunc(func(pkgPath string) (*types.Package, error) {
			// If the context was cancelled, we should abort.
			if ctx.Err() != nil {
				return nil, ctx.Err()
			}
			dep := resolveImportPath(pkgPath, pkg, deps)
			if dep == nil {
				return nil, snapshot.missingPkgError(pkgPath)
			}
			if !isValidImport(m.pkgPath, dep.m.pkgPath) {
				return nil, errors.Errorf("invalid use of internal package %s", pkgPath)
			}
			depPkg, err := dep.check(ctx, snapshot)
			if err != nil {
				return nil, err
			}
			pkg.imports[depPkg.m.pkgPath] = depPkg
			return depPkg.types, nil
		}),
	}
	if mode != source.ParseFull {
		// Non-Full modes don't retain function bodies, so skip the checks
		// that would need them.
		cfg.DisableUnusedImportCheck = true
		cfg.IgnoreFuncBodies = true
	}
	// We want to type check cgo code if go/types supports it.
	// We passed typecheckCgo to go/packages when we Loaded.
	typesinternal.SetUsesCgo(cfg)
	check := types.NewChecker(cfg, snapshot.FileSet(), pkg.types, pkg.typesInfo)
	var files []*ast.File
	for _, cgf := range pkg.compiledGoFiles {
		files = append(files, cgf.File)
	}
	// Type checking errors are handled via the config, so ignore them here.
	_ = check.Files(files)
	// If the context was cancelled, we may have returned a ton of transient
	// errors to the type checker. Swallow them.
	if ctx.Err() != nil {
		return nil, ctx.Err()
	}
	return pkg, nil
}
// parseCompiledGoFiles parses the package's compiled Go files into
// pkg.compiledGoFiles, accumulating parse errors and noting whether any
// AST had to be fixed up. In ParseExported mode it then prunes the ASTs:
// aggressively via astFilter when provided, otherwise with the safe
// per-file trimAST fallback.
func parseCompiledGoFiles(ctx context.Context, snapshot *snapshot, mode source.ParseMode, pkg *pkg, astFilter *unexportedFilter) error {
	for _, cgf := range pkg.m.compiledGoFiles {
		fh, err := snapshot.GetFile(ctx, cgf)
		if err != nil {
			return err
		}
		var pgf *source.ParsedGoFile
		var fixed bool
		// Only parse Full through the cache -- we need to own Exported ASTs
		// to prune them.
		if mode == source.ParseFull {
			pgh := snapshot.parseGoHandle(ctx, fh, mode)
			pgf, fixed, err = snapshot.parseGo(ctx, pgh)
		} else {
			// Uncached parse: the pruning below mutates the AST, which must
			// not happen to a shared, cached copy.
			d := parseGo(ctx, snapshot.FileSet(), fh, mode)
			pgf, fixed, err = d.parsed, d.fixed, d.err
		}
		if err != nil {
			return err
		}
		pkg.compiledGoFiles = append(pkg.compiledGoFiles, pgf)
		if pgf.ParseErr != nil {
			pkg.parseErrors = append(pkg.parseErrors, pgf.ParseErr)
		}
		// If we have fixed parse errors in any of the files, we should hide type
		// errors, as they may be completely nonsensical.
		pkg.hasFixedFiles = pkg.hasFixedFiles || fixed
	}
	if mode != source.ParseExported {
		return nil
	}
	if astFilter != nil {
		// Filter all files at once: an exported declaration in one file can
		// depend on unexported declarations in a sibling file.
		var files []*ast.File
		for _, cgf := range pkg.compiledGoFiles {
			files = append(files, cgf.File)
		}
		astFilter.Filter(files)
	} else {
		// Safe fallback: hollow out declarations per file without dropping any.
		for _, cgf := range pkg.compiledGoFiles {
			trimAST(cgf.File)
		}
	}
	return nil
}
func (s *snapshot) depsErrors(ctx context.Context, pkg *pkg) ([]*source.Diagnostic, error) {
// Select packages that can't be found, and were imported in non-workspace packages.
// Workspace packages already show their own errors.

View File

@ -12,8 +12,10 @@ import (
"go/parser"
"go/scanner"
"go/token"
"go/types"
"reflect"
"strconv"
"strings"
"golang.org/x/tools/internal/event"
"golang.org/x/tools/internal/lsp/debug/tag"
@ -32,15 +34,10 @@ type parseKey struct {
mode source.ParseMode
}
// astCacheKey is similar to parseKey, but is a distinct type because
// it is used to key a different value within the same map.
type astCacheKey parseKey
type parseGoHandle struct {
handle *memoize.Handle
file source.FileHandle
mode source.ParseMode
astCacheHandle *memoize.Handle
handle *memoize.Handle
file source.FileHandle
mode source.ParseMode
}
type parseGoData struct {
@ -65,16 +62,10 @@ func (s *snapshot) parseGoHandle(ctx context.Context, fh source.FileHandle, mode
return parseGo(ctx, snapshot.view.session.cache.fset, fh, mode)
}, nil)
astHandle := s.generation.Bind(astCacheKey(key), func(ctx context.Context, arg memoize.Arg) interface{} {
snapshot := arg.(*snapshot)
return buildASTCache(ctx, snapshot, parseHandle)
}, nil)
pgh := &parseGoHandle{
handle: parseHandle,
file: fh,
mode: mode,
astCacheHandle: astHandle,
handle: parseHandle,
file: fh,
mode: mode,
}
return s.addGoFile(key, pgh)
}
@ -98,6 +89,9 @@ func (s *snapshot) ParseGo(ctx context.Context, fh source.FileHandle, mode sourc
}
func (s *snapshot) parseGo(ctx context.Context, pgh *parseGoHandle) (*source.ParsedGoFile, bool, error) {
if pgh.mode == source.ParseExported {
panic("only type checking should use Exported")
}
d, err := pgh.handle.Get(ctx, s.generation, s)
if err != nil {
return nil, false, err
@ -106,36 +100,55 @@ func (s *snapshot) parseGo(ctx context.Context, pgh *parseGoHandle) (*source.Par
return data.parsed, data.fixed, data.err
}
func (s *snapshot) PosToDecl(ctx context.Context, pgf *source.ParsedGoFile) (map[token.Pos]ast.Decl, error) {
fh, err := s.GetFile(ctx, pgf.URI)
if err != nil {
return nil, err
}
pgh := s.parseGoHandle(ctx, fh, pgf.Mode)
d, err := pgh.astCacheHandle.Get(ctx, s.generation, s)
if err != nil {
return nil, err
}
data := d.(*astCacheData)
return data.posToDecl, data.err
type astCacheKey struct {
pkg packageHandleKey
uri span.URI
}
func (s *snapshot) PosToField(ctx context.Context, pgf *source.ParsedGoFile) (map[token.Pos]*ast.Field, error) {
fh, err := s.GetFile(ctx, pgf.URI)
func (s *snapshot) astCacheData(ctx context.Context, spkg source.Package, pos token.Pos) (*astCacheData, error) {
pkg := spkg.(*pkg)
pkgHandle := s.getPackage(pkg.m.id, pkg.mode)
if pkgHandle == nil {
return nil, fmt.Errorf("could not reconstruct package handle for %v", pkg.m.id)
}
tok := s.FileSet().File(pos)
if tok == nil {
return nil, fmt.Errorf("no file for pos %v", pos)
}
pgf, err := pkg.File(span.URIFromPath(tok.Name()))
if err != nil {
return nil, err
}
astHandle := s.generation.Bind(astCacheKey{pkgHandle.key, pgf.URI}, func(ctx context.Context, arg memoize.Arg) interface{} {
snapshot := arg.(*snapshot)
return buildASTCache(ctx, snapshot, pgf)
}, nil)
pgh := s.parseGoHandle(ctx, fh, pgf.Mode)
d, err := pgh.astCacheHandle.Get(ctx, s.generation, s)
if err != nil || d == nil {
d, err := astHandle.Get(ctx, s.generation, s)
if err != nil {
return nil, err
}
data := d.(*astCacheData)
return data.posToField, data.err
if data.err != nil {
return nil, data.err
}
return data, nil
}
// PosToDecl returns the top-level declaration recorded at pos in the given
// package's AST cache, or nil if the cache has no declaration for pos.
func (s *snapshot) PosToDecl(ctx context.Context, spkg source.Package, pos token.Pos) (ast.Decl, error) {
	data, err := s.astCacheData(ctx, spkg, pos)
	if err != nil {
		return nil, err
	}
	// Missing positions yield a nil Decl, not an error.
	return data.posToDecl[pos], nil
}
// PosToField returns the struct/interface field recorded at pos in the
// given package's AST cache, or nil if the cache has no field for pos.
func (s *snapshot) PosToField(ctx context.Context, spkg source.Package, pos token.Pos) (*ast.Field, error) {
	data, err := s.astCacheData(ctx, spkg, pos)
	if err != nil {
		return nil, err
	}
	// Missing positions yield a nil Field, not an error.
	return data.posToField[pos], nil
}
type astCacheData struct {
@ -147,7 +160,7 @@ type astCacheData struct {
// buildASTCache builds caches to aid in quickly going from the typed
// world to the syntactic world.
func buildASTCache(ctx context.Context, snapshot *snapshot, parseHandle *memoize.Handle) *astCacheData {
func buildASTCache(ctx context.Context, snapshot *snapshot, pgf *source.ParsedGoFile) *astCacheData {
var (
// path contains all ancestors, including n.
path []ast.Node
@ -155,21 +168,12 @@ func buildASTCache(ctx context.Context, snapshot *snapshot, parseHandle *memoize
decls []ast.Decl
)
v, err := parseHandle.Get(ctx, snapshot.generation, snapshot)
if err != nil {
return &astCacheData{err: err}
}
file := v.(*parseGoData).parsed.File
if err != nil {
return &astCacheData{err: fmt.Errorf("nil file")}
}
data := &astCacheData{
posToDecl: make(map[token.Pos]ast.Decl),
posToField: make(map[token.Pos]*ast.Field),
}
ast.Inspect(file, func(n ast.Node) bool {
ast.Inspect(pgf.File, func(n ast.Node) bool {
if n == nil {
lastP := path[len(path)-1]
path = path[:len(path)-1]
@ -310,10 +314,6 @@ func parseGo(ctx context.Context, fset *token.FileSet, fh source.FileHandle, mod
}
}
if mode == source.ParseExported {
trimAST(file)
}
return &parseGoData{
parsed: &source.ParsedGoFile{
URI: fh.URI(),
@ -332,6 +332,252 @@ func parseGo(ctx context.Context, fset *token.FileSet, fh source.FileHandle, mod
}
}
// An unexportedFilter removes as much unexported AST from a set of Files as possible.
type unexportedFilter struct {
	// uses records unexported identifiers that are referenced, directly or
	// transitively, by exported declarations and so must be retained.
	uses map[string]bool
}
// Filter records uses of unexported identifiers and filters out all other
// unexported declarations. It mutates the given files in place.
func (f *unexportedFilter) Filter(files []*ast.File) {
	// Iterate to fixed point -- unexported types can include other unexported types.
	oldLen := len(f.uses)
	for {
		for _, file := range files {
			f.recordUses(file)
		}
		// No new uses discovered on this pass: the set is complete.
		if len(f.uses) == oldLen {
			break
		}
		oldLen = len(f.uses)
	}
	for _, file := range files {
		var newDecls []ast.Decl
		for _, decl := range file.Decls {
			if f.filterDecl(decl) {
				newDecls = append(newDecls, decl)
			}
		}
		file.Decls = newDecls
		// Drop syntactic metadata that may now refer to pruned declarations.
		file.Scope = nil
		file.Unresolved = nil
		file.Comments = nil
		trimAST(file)
	}
}
// keep reports whether a declaration named by ident should survive
// filtering: exported names always do, as do unexported names that were
// recorded as used by an exported declaration.
func (f *unexportedFilter) keep(ident *ast.Ident) bool {
	if ast.IsExported(ident.Name) {
		return true
	}
	return f.uses[ident.Name]
}
// filterDecl reports whether decl should be kept, pruning a GenDecl's
// specs in place as a side effect.
func (f *unexportedFilter) filterDecl(decl ast.Decl) bool {
	switch decl := decl.(type) {
	case *ast.FuncDecl:
		// Drop methods whose receiver type was itself dropped.
		if ident := recvIdent(decl); ident != nil && !f.keep(ident) {
			return false
		}
		return f.keep(decl.Name)
	case *ast.GenDecl:
		if decl.Tok == token.CONST {
			// Constants can involve iota, and iota is hard to deal with.
			return true
		}
		var newSpecs []ast.Spec
		for _, spec := range decl.Specs {
			if f.filterSpec(spec) {
				newSpecs = append(newSpecs, spec)
			}
		}
		decl.Specs = newSpecs
		// Drop the whole declaration if nothing in it survived.
		return len(newSpecs) != 0
	case *ast.BadDecl:
		return false
	}
	// ast.Decl is a closed set; anything else is a new node type this
	// filter has not been taught about.
	panic(fmt.Sprintf("unknown ast.Decl %T", decl))
}
// filterSpec reports whether spec should be kept, pruning value names and
// struct/interface members in place.
func (f *unexportedFilter) filterSpec(spec ast.Spec) bool {
	switch spec := spec.(type) {
	case *ast.ImportSpec:
		// Imports are always kept; trimAST/type checking deal with unused ones.
		return true
	case *ast.ValueSpec:
		var newNames []*ast.Ident
		for _, name := range spec.Names {
			if f.keep(name) {
				newNames = append(newNames, name)
			}
		}
		// NOTE(review): spec.Values is left untouched, so dropping a subset
		// of Names from a multi-name spec could misalign names and values --
		// presumably caught by the type-check retry; confirm.
		spec.Names = newNames
		return len(spec.Names) != 0
	case *ast.TypeSpec:
		if !f.keep(spec.Name) {
			return false
		}
		// The type itself is kept; prune its unexported members.
		switch typ := spec.Type.(type) {
		case *ast.StructType:
			f.filterFieldList(typ.Fields)
		case *ast.InterfaceType:
			f.filterFieldList(typ.Methods)
		}
		return true
	}
	// ast.Spec is a closed set; anything else is unexpected.
	panic(fmt.Sprintf("unknown ast.Spec %T", spec))
}
// filterFieldList prunes fields in place, keeping embedded fields and any
// field with at least one kept name.
func (f *unexportedFilter) filterFieldList(fields *ast.FieldList) {
	var newFields []*ast.Field
	for _, field := range fields.List {
		if len(field.Names) == 0 {
			// Keep embedded fields: they can export methods and fields.
			newFields = append(newFields, field)
		}
		for _, name := range field.Names {
			// One kept name keeps the whole field (including sibling names).
			if f.keep(name) {
				newFields = append(newFields, field)
				break
			}
		}
	}
	fields.List = newFields
}
// recordUses walks the declarations of file that will be kept and records
// in f.uses the unexported identifiers they mention and therefore require.
func (f *unexportedFilter) recordUses(file *ast.File) {
	for _, decl := range file.Decls {
		switch decl := decl.(type) {
		case *ast.FuncDecl:
			// Ignore methods on dropped types.
			if ident := recvIdent(decl); ident != nil && !f.keep(ident) {
				break
			}
			// Ignore functions with dropped names.
			if !f.keep(decl.Name) {
				break
			}
			f.recordFuncType(decl.Type)
		case *ast.GenDecl:
			for _, spec := range decl.Specs {
				switch spec := spec.(type) {
				case *ast.ValueSpec:
					for i, name := range spec.Names {
						// Don't mess with constants -- iota is hard.
						if f.keep(name) || decl.Tok == token.CONST {
							// Record both the declared type and the
							// corresponding initializer expression, if any.
							f.recordIdents(spec.Type)
							if len(spec.Values) > i {
								f.recordIdents(spec.Values[i])
							}
						}
					}
				case *ast.TypeSpec:
					switch typ := spec.Type.(type) {
					case *ast.StructType:
						f.recordFieldUses(false, typ.Fields)
					case *ast.InterfaceType:
						f.recordFieldUses(false, typ.Methods)
					}
				}
			}
		}
	}
}
// recvIdent returns the identifier of a method receiver, e.g. *int.
func recvIdent(decl *ast.FuncDecl) *ast.Ident {
if decl.Recv == nil || len(decl.Recv.List) == 0 {
return nil
}
x := decl.Recv.List[0].Type
if star, ok := x.(*ast.StarExpr); ok {
x = star.X
}
if ident, ok := x.(*ast.Ident); ok {
return ident
}
return nil
}
// recordIdents records unexported identifiers in an Expr in uses.
// These may be types, e.g. in map[key]value, function names, e.g. in foo(),
// or simple variable references. References that will be discarded, such
// as those in function literal bodies, are ignored.
func (f *unexportedFilter) recordIdents(x ast.Expr) {
	ast.Inspect(x, func(n ast.Node) bool {
		if n == nil {
			return false
		}
		if complit, ok := n.(*ast.CompositeLit); ok {
			// We clear out composite literal contents; just record their type.
			f.recordIdents(complit.Type)
			return false
		}
		if flit, ok := n.(*ast.FuncLit); ok {
			// Function literal bodies are discarded; only the signature's
			// types need to survive.
			f.recordFuncType(flit.Type)
			return false
		}
		if ident, ok := n.(*ast.Ident); ok && !ast.IsExported(ident.Name) {
			f.uses[ident.Name] = true
		}
		return true
	})
}
// recordFuncType records the types mentioned by a function type.
// Parameter and result types must be retained for the signature to check.
func (f *unexportedFilter) recordFuncType(x *ast.FuncType) {
	f.recordFieldUses(true, x.Params)
	f.recordFieldUses(true, x.Results)
}
// recordFieldUses records unexported identifiers used in fields, which may be
// struct members, interface members, or function parameter/results
// (isParams selects the parameter/result behavior).
func (f *unexportedFilter) recordFieldUses(isParams bool, fields *ast.FieldList) {
	// Results lists may be absent entirely.
	if fields == nil {
		return
	}
	for _, field := range fields.List {
		if isParams {
			// Parameter types of retained functions need to be retained.
			f.recordIdents(field.Type)
			continue
		}
		if ft, ok := field.Type.(*ast.FuncType); ok {
			// Function declarations in interfaces need all their types retained.
			f.recordFuncType(ft)
			continue
		}
		if len(field.Names) == 0 {
			// Embedded fields might contribute exported names.
			f.recordIdents(field.Type)
		}
		for _, name := range field.Names {
			// We only need normal fields if they're exported.
			if ast.IsExported(name.Name) {
				f.recordIdents(field.Type)
				break
			}
		}
	}
}
// ProcessErrors records additional uses from errors, returning the new uses
// and any unexpected errors. "missing return" errors are ignored: they are
// an expected artifact of hollowing out function bodies. "undeclared name"
// errors indicate identifiers the filter pruned too aggressively; those
// names are recorded in f.uses so a retry can keep them.
func (f *unexportedFilter) ProcessErrors(errors []types.Error) (map[string]bool, []types.Error) {
	var unexpected []types.Error
	missing := map[string]bool{}
	for _, err := range errors {
		if strings.Contains(err.Msg, "missing return") {
			continue
		}
		const undeclared = "undeclared name: "
		if strings.HasPrefix(err.Msg, undeclared) {
			// Trim the prefix once instead of recomputing it for each map.
			name := strings.TrimPrefix(err.Msg, undeclared)
			missing[name] = true
			f.uses[name] = true
			continue
		}
		unexpected = append(unexpected, err)
	}
	return missing, unexpected
}
// trimAST clears any part of the AST not relevant to type checking
// expressions at pos.
func trimAST(file *ast.File) {
@ -353,6 +599,8 @@ func trimAST(file *ast.File) {
// expensive. Try to clear them out.
at, ok := n.Type.(*ast.ArrayType)
if !ok {
// Composite literal. No harm removing all its fields.
n.Elts = nil
break
}
// Removing the elements from an ellipsis array changes its type.

View File

@ -5,9 +5,17 @@
package cache
import (
"bytes"
"go/ast"
"go/format"
"go/parser"
"go/token"
"go/types"
"reflect"
"sort"
"testing"
"golang.org/x/tools/go/packages"
)
func TestArrayLength(t *testing.T) {
@ -35,3 +43,175 @@ func TestArrayLength(t *testing.T) {
}
}
}
// TestTrim checks that unexportedFilter retains exactly the declarations
// reachable from exported API: each case filters a synthetic file and
// compares the names surviving in the type-checked package against kept.
func TestTrim(t *testing.T) {
	tests := []struct {
		name string
		file string // file body, appended to a "package main" clause
		kept []string
	}{
		{
			name: "delete_unused",
			file: `
type x struct{}
func y()
var z int
`,
			kept: []string{},
		},
		{
			// From the common type in testing.
			name: "unexported_embedded",
			file: `
type x struct {}
type Exported struct { x }
`,
			kept: []string{"Exported", "x"},
		},
		{
			// From the d type in unicode.
			name: "exported_field_unexported_type",
			file: `
type x struct {}
type Exported struct {
X x
}
`,
			kept: []string{"Exported", "x"},
		},
		{
			// From errNotExist in io/fs.
			name: "exported_var_function_call",
			file: `
func x() int { return 0 }
var Exported = x()
`,
			kept: []string{"Exported", "x"},
		},
		{
			// From DefaultServeMux in net/http.
			name: "exported_pointer_to_unexported_var",
			file: `
var Exported = &x
var x int
`,
			kept: []string{"Exported", "x"},
		},
		{
			// From DefaultWriter in goldmark/renderer/html.
			name: "exported_pointer_to_composite_lit",
			file: `
var Exported = &x{}
type x struct{}
`,
			kept: []string{"Exported", "x"},
		},
		{
			// From SelectDir in reflect.
			name: "leave_constants",
			file: `
type Enum int
const (
_ Enum = iota
EnumOne
)
`,
			kept: []string{"Enum", "EnumOne"},
		},
		{
			name: "constant_conversion",
			file: `
type x int
const (
foo x = 0
)
`,
			kept: []string{"x", "foo"},
		},
		{
			name: "unexported_return",
			file: `
type x int
func Exported() x {}
type y int
type Interface interface {
Exported() y
}
`,
			kept: []string{"Exported", "Interface", "x", "y"},
		},
		{
			name: "drop_composite_literals",
			file: `
type x int
type Exported struct {
foo x
}
var Var = Exported{foo:1}
`,
			kept: []string{"Exported", "Var"},
		},
		{
			name: "drop_function_literals",
			file: `
type x int
var Exported = func() { return x(0) }
`,
			kept: []string{"Exported"},
		},
		{
			name: "missing_receiver_panic",
			file: `
func() foo() {}
`,
			kept: []string{},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			fset := token.NewFileSet()
			file, err := parser.ParseFile(fset, "main.go", "package main\n\n"+tt.file, parser.AllErrors)
			if err != nil {
				t.Fatal(err)
			}
			// Prune the file, then type check what remains.
			filter := &unexportedFilter{uses: map[string]bool{}}
			filter.Filter([]*ast.File{file})
			pkg := types.NewPackage("main", "main")
			checker := types.NewChecker(&types.Config{
				DisableUnusedImportCheck: true,
			}, fset, pkg, nil)
			if err := checker.Files([]*ast.File{file}); err != nil {
				t.Error(err)
			}
			// Compare surviving package-scope names, order-insensitively.
			names := pkg.Scope().Names()
			sort.Strings(names)
			sort.Strings(tt.kept)
			if !reflect.DeepEqual(names, tt.kept) {
				t.Errorf("package contains names %v, wanted %v", names, tt.kept)
			}
		})
	}
}
// TestPkg runs the filter over a real standard-library package ("io") and
// logs the pruned sources. It is skipped by default and exists only for
// manual debugging of the filter's output.
func TestPkg(t *testing.T) {
	t.Skip("for manual debugging")
	fset := token.NewFileSet()
	pkgs, err := packages.Load(&packages.Config{
		Mode: packages.NeedSyntax | packages.NeedFiles,
		Fset: fset,
	}, "io")
	if err != nil {
		t.Fatal(err)
	}
	if len(pkgs[0].Errors) != 0 {
		t.Fatal(pkgs[0].Errors)
	}
	filter := &unexportedFilter{uses: map[string]bool{}}
	filter.Filter(pkgs[0].Syntax)
	for _, file := range pkgs[0].Syntax {
		buf := &bytes.Buffer{}
		// Error ignored: the output is only logged for human inspection.
		format.Node(buf, fset, file)
		t.Log(buf.String())
	}
}

View File

@ -6,6 +6,7 @@ package cache
import (
"go/ast"
"go/scanner"
"go/types"
"golang.org/x/mod/module"
@ -16,19 +17,19 @@ import (
// pkg contains the type information needed by the source package.
type pkg struct {
m *metadata
mode source.ParseMode
goFiles []*source.ParsedGoFile
compiledGoFiles []*source.ParsedGoFile
diagnostics []*source.Diagnostic
imports map[packagePath]*pkg
version *module.Version
typeErrors []types.Error
types *types.Package
typesInfo *types.Info
typesSizes types.Sizes
hasListOrParseErrors bool
hasTypeErrors bool
m *metadata
mode source.ParseMode
goFiles []*source.ParsedGoFile
compiledGoFiles []*source.ParsedGoFile
diagnostics []*source.Diagnostic
imports map[packagePath]*pkg
version *module.Version
parseErrors []scanner.ErrorList
typeErrors []types.Error
types *types.Package
typesInfo *types.Info
typesSizes types.Sizes
hasFixedFiles bool
}
// Declare explicit types for package paths, names, and IDs to ensure that we
@ -146,9 +147,9 @@ func (p *pkg) Version() *module.Version {
}
func (p *pkg) HasListOrParseErrors() bool {
return p.hasListOrParseErrors
return len(p.m.errors) != 0 || len(p.parseErrors) != 0
}
func (p *pkg) HasTypeErrors() bool {
return p.hasTypeErrors
return len(p.typeErrors) != 0
}

View File

@ -1394,7 +1394,6 @@ func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileC
continue
}
newGen.Inherit(v.handle)
newGen.Inherit(v.astCacheHandle)
result.goFiles[k] = v
}

View File

@ -213,27 +213,16 @@ func (c *completer) item(ctx context.Context, cand candidate) (CompletionItem, e
}
uri := span.URIFromPath(pos.Filename)
// Find the source file of the candidate, starting from a package
// that should have it in its dependencies.
searchPkg := c.pkg
if cand.imp != nil && cand.imp.pkg != nil {
searchPkg = cand.imp.pkg
}
pgf, pkg, err := source.FindPosInPackage(c.snapshot, searchPkg, obj.Pos())
// Find the source file of the candidate.
pkg, err := source.FindPackageFromPos(ctx, c.snapshot, obj.Pos())
if err != nil {
return item, nil
}
posToDecl, err := c.snapshot.PosToDecl(ctx, pgf)
decl, err := c.snapshot.PosToDecl(ctx, pkg, obj.Pos())
if err != nil {
return CompletionItem{}, err
}
decl := posToDecl[obj.Pos()]
if decl == nil {
return item, nil
}
hover, err := source.HoverInfo(ctx, c.snapshot, pkg, obj, decl)
if err != nil {
event.Error(ctx, "failed to find Hover", err, tag.URI.Of(uri))

View File

@ -19,7 +19,6 @@ import (
"golang.org/x/tools/internal/event"
"golang.org/x/tools/internal/lsp/protocol"
"golang.org/x/tools/internal/span"
errors "golang.org/x/xerrors"
)
@ -320,21 +319,12 @@ func HoverInfo(ctx context.Context, s Snapshot, pkg Package, obj types.Object, n
break
}
f := s.FileSet().File(obj.Pos())
if f == nil {
break
}
pgf, err := pkg.File(span.URIFromPath(f.Name()))
if err != nil {
return nil, err
}
posToField, err := s.PosToField(ctx, pgf)
field, err := s.PosToField(ctx, pkg, obj.Pos())
if err != nil {
return nil, err
}
if field := posToField[obj.Pos()]; field != nil {
if field != nil {
comment := field.Doc
if comment.Text() == "" {
comment = field.Comment

View File

@ -262,7 +262,11 @@ func findIdentifier(ctx context.Context, snapshot Snapshot, pkg Package, file *a
}
result.Declaration.MappedRange = append(result.Declaration.MappedRange, rng)
if result.Declaration.node, err = objToDecl(ctx, snapshot, pkg, result.Declaration.obj); err != nil {
declPkg, err := FindPackageFromPos(ctx, snapshot, result.Declaration.obj.Pos())
if err != nil {
return nil, nil
}
if result.Declaration.node, err = snapshot.PosToDecl(ctx, declPkg, result.Declaration.obj.Pos()); err != nil {
return nil, err
}
typ := pkg.GetTypesInfo().TypeOf(result.ident)
@ -363,18 +367,6 @@ func hasErrorType(obj types.Object) bool {
return types.IsInterface(obj.Type()) && obj.Pkg() == nil && obj.Name() == "error"
}
func objToDecl(ctx context.Context, snapshot Snapshot, srcPkg Package, obj types.Object) (ast.Decl, error) {
pgf, _, err := FindPosInPackage(snapshot, srcPkg, obj.Pos())
if err != nil {
return nil, err
}
posToDecl, err := snapshot.PosToDecl(ctx, pgf)
if err != nil {
return nil, err
}
return posToDecl[obj.Pos()], nil
}
// importSpec handles positions inside of an *ast.ImportSpec.
func importSpec(snapshot Snapshot, pkg Package, file *ast.File, pos token.Pos) (*IdentifierInfo, error) {
var imp *ast.ImportSpec

View File

@ -93,7 +93,11 @@ FindCall:
comment *ast.CommentGroup
)
if obj != nil {
node, err := objToDecl(ctx, snapshot, pkg, obj)
declPkg, err := FindPackageFromPos(ctx, snapshot, obj.Pos())
if err != nil {
return nil, 0, err
}
node, err := snapshot.PosToDecl(ctx, declPkg, obj.Pos())
if err != nil {
return nil, 0, err
}

View File

@ -219,12 +219,12 @@ func NewSignature(ctx context.Context, s Snapshot, pkg Package, sig *types.Signa
// To do this, it looks in the AST of the file in which the object is declared.
// On any errors, it always falls back to types.TypeString.
func FormatVarType(ctx context.Context, snapshot Snapshot, srcpkg Package, obj *types.Var, qf types.Qualifier) string {
pgf, pkg, err := FindPosInPackage(snapshot, srcpkg, obj.Pos())
pkg, err := FindPackageFromPos(ctx, snapshot, obj.Pos())
if err != nil {
return types.TypeString(obj.Type(), qf)
}
expr, err := varType(ctx, snapshot, pgf, obj)
expr, err := varType(ctx, snapshot, pkg, obj)
if err != nil {
return types.TypeString(obj.Type(), qf)
}
@ -244,12 +244,11 @@ func FormatVarType(ctx context.Context, snapshot Snapshot, srcpkg Package, obj *
}
// varType returns the type expression for a *types.Var.
func varType(ctx context.Context, snapshot Snapshot, pgf *ParsedGoFile, obj *types.Var) (ast.Expr, error) {
posToField, err := snapshot.PosToField(ctx, pgf)
func varType(ctx context.Context, snapshot Snapshot, pkg Package, obj *types.Var) (ast.Expr, error) {
field, err := snapshot.PosToField(ctx, pkg, obj.Pos())
if err != nil {
return nil, err
}
field := posToField[obj.Pos()]
if field == nil {
return nil, fmt.Errorf("no declaration for object %s", obj.Name())
}

View File

@ -274,20 +274,19 @@ func CompareDiagnostic(a, b *Diagnostic) int {
return 1
}
// FindPosInPackage finds the parsed file for a position in a given search
// FindPackageFromPos finds the type-checked package containing pos. It returns the first
// package.
func FindPosInPackage(snapshot Snapshot, searchpkg Package, pos token.Pos) (*ParsedGoFile, Package, error) {
func FindPackageFromPos(ctx context.Context, snapshot Snapshot, pos token.Pos) (Package, error) {
tok := snapshot.FileSet().File(pos)
if tok == nil {
return nil, nil, errors.Errorf("no file for pos in package %s", searchpkg.ID())
return nil, errors.Errorf("no file for pos %v", pos)
}
uri := span.URIFromPath(tok.Name())
pgf, pkg, err := findFileInDeps(searchpkg, uri)
pkgs, err := snapshot.PackagesForFile(ctx, uri, TypecheckWorkspace)
if err != nil {
return nil, nil, err
return nil, err
}
return pgf, pkg, nil
return pkgs[0], nil
}
// findFileInDeps finds uri in pkg or its dependencies.

View File

@ -80,12 +80,12 @@ type Snapshot interface {
// to quickly find corresponding *ast.Field node given a *types.Var.
// We must refer to the AST to render type aliases properly when
// formatting signatures and other types.
PosToField(ctx context.Context, pgf *ParsedGoFile) (map[token.Pos]*ast.Field, error)
PosToField(ctx context.Context, pkg Package, pos token.Pos) (*ast.Field, error)
// PosToDecl maps certain objects' positions to their surrounding
// ast.Decl. This mapping is used when building the documentation
// string for the objects.
PosToDecl(ctx context.Context, pgf *ParsedGoFile) (map[token.Pos]ast.Decl, error)
PosToDecl(ctx context.Context, pkg Package, pos token.Pos) (ast.Decl, error)
// DiagnosePackage returns basic diagnostics, including list, parse, and type errors
// for pkg, grouped by file.
@ -419,10 +419,9 @@ const (
// This is the mode used when attempting to examine the package graph structure.
ParseHeader ParseMode = iota
// ParseExported specifies that the public symbols are needed, but things like
// private symbols and function bodies are not.
// This mode is used for things where a package is being consumed only as a
// dependency.
// ParseExported specifies that the package is used only as a dependency,
// and only its exported declarations are needed. More may be included if
// necessary to avoid type errors.
ParseExported
// ParseFull specifies the full AST is needed.