mirror of https://github.com/golang/tools.git
all: fix some staticcheck errors
Updates golang/go#35718 Change-Id: I10bfd5421cd44bb58b8bcaa6e9205040c25f51be Reviewed-on: https://go-review.googlesource.com/c/tools/+/208257 Run-TryBot: Rebecca Stambler <rstambler@golang.org> TryBot-Result: Gobot Gobot <gobot@golang.org> Reviewed-by: Heschi Kreinick <heschi@google.com>
This commit is contained in:
parent
cf670267be
commit
207d3de1fa
|
|
@ -246,7 +246,7 @@ func goList(dir string) (*Pkg, error) {
|
|||
var pkg Pkg
|
||||
out, err := exec.Command(*flagGoCmd, "list", "-json", dir).Output()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("go list -json %s: %v\n", dir, err)
|
||||
return nil, fmt.Errorf("go list -json %s: %v", dir, err)
|
||||
}
|
||||
if err := json.Unmarshal(out, &pkg); err != nil {
|
||||
return nil, fmt.Errorf("go list -json %s: unmarshal: %v", dir, err)
|
||||
|
|
|
|||
|
|
@ -417,7 +417,7 @@ func digraph(cmd string, args []string) error {
|
|||
|
||||
case "succs", "preds":
|
||||
if len(args) == 0 {
|
||||
return fmt.Errorf("usage: digraph %s <node> ...", cmd)
|
||||
return fmt.Errorf("usage: digraph %s <node> ... ", cmd)
|
||||
}
|
||||
g := g
|
||||
if cmd == "preds" {
|
||||
|
|
@ -435,7 +435,7 @@ func digraph(cmd string, args []string) error {
|
|||
|
||||
case "forward", "reverse":
|
||||
if len(args) == 0 {
|
||||
return fmt.Errorf("usage: digraph %s <node> ...", cmd)
|
||||
return fmt.Errorf("usage: digraph %s <node> ... ", cmd)
|
||||
}
|
||||
roots := make(nodeset)
|
||||
for _, root := range args {
|
||||
|
|
|
|||
|
|
@ -126,7 +126,7 @@ func main() {
|
|||
flag.Parse()
|
||||
|
||||
if len(flag.Args()) == 0 {
|
||||
fmt.Fprintf(stderr, usage)
|
||||
fmt.Fprint(stderr, usage)
|
||||
os.Exit(1)
|
||||
}
|
||||
if !fiximports(flag.Args()...) {
|
||||
|
|
|
|||
|
|
@ -27,7 +27,7 @@ var (
|
|||
version = "devel"
|
||||
)
|
||||
|
||||
var exitCleanly error = errors.New("exit cleanly sentinel value")
|
||||
var errExitCleanly error = errors.New("exit cleanly sentinel value")
|
||||
|
||||
func main() {
|
||||
flag.Parse()
|
||||
|
|
@ -41,7 +41,7 @@ func main() {
|
|||
|
||||
runStep := func(s step) {
|
||||
err := s(ctx)
|
||||
if err == exitCleanly {
|
||||
if err == errExitCleanly {
|
||||
os.Exit(0)
|
||||
}
|
||||
if err != nil {
|
||||
|
|
|
|||
|
|
@ -25,7 +25,7 @@ func welcome(ctx context.Context) error {
|
|||
}
|
||||
if strings.ToLower(answer) != "y" {
|
||||
fmt.Println("Exiting install.")
|
||||
return exitCleanly
|
||||
return errExitCleanly
|
||||
}
|
||||
|
||||
return nil
|
||||
|
|
@ -65,7 +65,7 @@ func chooseVersion(ctx context.Context) error {
|
|||
if strings.ToLower(answer) != "y" {
|
||||
// TODO: handle passing a version
|
||||
fmt.Println("Aborting install.")
|
||||
return exitCleanly
|
||||
return errExitCleanly
|
||||
}
|
||||
|
||||
return nil
|
||||
|
|
@ -79,7 +79,7 @@ func downloadGo(ctx context.Context) error {
|
|||
|
||||
if strings.ToLower(answer) != "y" {
|
||||
fmt.Println("Aborting install.")
|
||||
return exitCleanly
|
||||
return errExitCleanly
|
||||
}
|
||||
|
||||
fmt.Printf("Downloading Go version %s to %s\n", *goVersion, installPath)
|
||||
|
|
@ -105,7 +105,7 @@ func setupGOPATH(ctx context.Context) error {
|
|||
|
||||
if strings.ToLower(answer) != "y" {
|
||||
fmt.Println("Exiting and not setting up GOPATH.")
|
||||
return exitCleanly
|
||||
return errExitCleanly
|
||||
}
|
||||
|
||||
fmt.Println("Setting up GOPATH")
|
||||
|
|
|
|||
|
|
@ -6,8 +6,11 @@
|
|||
|
||||
package main
|
||||
|
||||
import "go/importer"
|
||||
import (
|
||||
"go/importer"
|
||||
"go/token"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("gc", importer.For("gc", nil))
|
||||
register("gc", importer.ForCompiler(token.NewFileSet(), "gc", nil))
|
||||
}
|
||||
|
|
|
|||
|
|
@ -8,11 +8,12 @@ package main
|
|||
|
||||
import (
|
||||
"go/importer"
|
||||
"go/token"
|
||||
"go/types"
|
||||
)
|
||||
|
||||
func init() {
|
||||
register("gccgo", importer.For("gccgo", nil))
|
||||
register("gccgo", importer.ForCompiler(token.NewFileSet(), "gccgo", nil))
|
||||
}
|
||||
|
||||
// Print the extra gccgo compiler data for this package, if it exists.
|
||||
|
|
|
|||
|
|
@ -23,9 +23,9 @@ var (
|
|||
|
||||
// lists of registered sources and corresponding importers
|
||||
var (
|
||||
sources []string
|
||||
importers []types.Importer
|
||||
importFailed = errors.New("import failed")
|
||||
sources []string
|
||||
importers []types.Importer
|
||||
errImportFailed = errors.New("import failed")
|
||||
)
|
||||
|
||||
func usage() {
|
||||
|
|
@ -154,7 +154,7 @@ func (p *protector) Import(path string) (pkg *types.Package, err error) {
|
|||
defer func() {
|
||||
if recover() != nil {
|
||||
pkg = nil
|
||||
err = importFailed
|
||||
err = errImportFailed
|
||||
}
|
||||
}()
|
||||
return p.imp.Import(path)
|
||||
|
|
|
|||
|
|
@ -133,7 +133,7 @@ func (p *printer) writeTypeInternal(this *types.Package, typ types.Type, visited
|
|||
p.print("\n")
|
||||
}
|
||||
for i, n := 0, t.NumEmbeddeds(); i < n; i++ {
|
||||
typ := t.Embedded(i)
|
||||
typ := t.EmbeddedType(i)
|
||||
p.writeTypeInternal(this, typ, visited)
|
||||
p.print("\n")
|
||||
}
|
||||
|
|
|
|||
|
|
@ -284,7 +284,7 @@ func checkPkgFiles(files []*ast.File) {
|
|||
}
|
||||
report(err)
|
||||
},
|
||||
Importer: importer.For(*compiler, nil),
|
||||
Importer: importer.ForCompiler(fset, *compiler, nil),
|
||||
Sizes: SizesFor(build.Default.Compiler, build.Default.GOARCH),
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -587,7 +587,7 @@ func TestFailFastOnShallowCopy(t *testing.T) {
|
|||
t.Errorf("shallow copy: recover() = %q, want %q", got, want)
|
||||
}
|
||||
}()
|
||||
y.String() // panics
|
||||
_ = y.String() // panics
|
||||
t.Error("didn't panic as expected")
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
/*
|
||||
|
||||
The analysis package defines the interface between a modular static
|
||||
Package analysis defines the interface between a modular static
|
||||
analysis and an analysis driver program.
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This package constructs a simple control-flow graph (CFG) of the
|
||||
// Package cfg constructs a simple control-flow graph (CFG) of the
|
||||
// statements and expressions within a single function.
|
||||
//
|
||||
// Use cfg.New to construct the CFG for a function body.
|
||||
|
|
|
|||
|
|
@ -2,9 +2,7 @@
|
|||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package cgo
|
||||
|
||||
// This file handles cgo preprocessing of files containing `import "C"`.
|
||||
// Package cgo handles cgo preprocessing of files containing `import "C"`.
|
||||
//
|
||||
// DESIGN
|
||||
//
|
||||
|
|
@ -51,6 +49,8 @@ package cgo
|
|||
// its handling of function calls, analogous to the treatment of map
|
||||
// lookups in which y=m[k] and y,ok=m[k] are both legal.
|
||||
|
||||
package cgo
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
|
|
|
|||
|
|
@ -84,7 +84,7 @@ func standardArExportData(archive io.ReadSeeker) (io.ReadSeeker, error) {
|
|||
}
|
||||
off += arHdrSize
|
||||
|
||||
if bytes.Compare(hdrBuf[arFmagOff:arFmagOff+arFmagSize], []byte(arfmag)) != 0 {
|
||||
if !bytes.Equal(hdrBuf[arFmagOff:arFmagOff+arFmagSize], []byte(arfmag)) {
|
||||
return nil, fmt.Errorf("archive header format header (%q)", hdrBuf[:])
|
||||
}
|
||||
|
||||
|
|
@ -94,7 +94,7 @@ func standardArExportData(archive io.ReadSeeker) (io.ReadSeeker, error) {
|
|||
}
|
||||
|
||||
fn := hdrBuf[arNameOff : arNameOff+arNameSize]
|
||||
if fn[0] == '/' && (fn[1] == ' ' || fn[1] == '/' || bytes.Compare(fn[:8], []byte("/SYM64/ ")) == 0) {
|
||||
if fn[0] == '/' && (fn[1] == ' ' || fn[1] == '/' || bytes.Equal(fn[:8], []byte("/SYM64/ "))) {
|
||||
// Archive symbol table or extended name table,
|
||||
// which we don't care about.
|
||||
} else {
|
||||
|
|
|
|||
|
|
@ -1025,7 +1025,7 @@ func (p *parser) skipInlineBody() {
|
|||
func (p *parser) parseTypes(pkg *types.Package) {
|
||||
maxp1 := p.parseInt()
|
||||
exportedp1 := p.parseInt()
|
||||
p.typeList = make([]types.Type, maxp1, maxp1)
|
||||
p.typeList = make([]types.Type, maxp1)
|
||||
|
||||
type typeOffset struct {
|
||||
offset int
|
||||
|
|
|
|||
|
|
@ -344,7 +344,7 @@ func (p *parser) expectKeyword(keyword string) {
|
|||
|
||||
// PackageId = string_lit .
|
||||
//
|
||||
func (p *parser) parsePackageId() string {
|
||||
func (p *parser) parsePackageID() string {
|
||||
id, err := strconv.Unquote(p.expect(scanner.String))
|
||||
if err != nil {
|
||||
p.error(err)
|
||||
|
|
@ -384,7 +384,7 @@ func (p *parser) parseDotIdent() string {
|
|||
//
|
||||
func (p *parser) parseQualifiedName() (id, name string) {
|
||||
p.expect('@')
|
||||
id = p.parsePackageId()
|
||||
id = p.parsePackageID()
|
||||
p.expect('.')
|
||||
// Per rev f280b8a485fd (10/2/2013), qualified names may be used for anonymous fields.
|
||||
if p.tok == '?' {
|
||||
|
|
@ -696,7 +696,7 @@ func (p *parser) parseInterfaceType(parent *types.Package) types.Type {
|
|||
|
||||
// Complete requires the type's embedded interfaces to be fully defined,
|
||||
// but we do not define any
|
||||
return types.NewInterface(methods, nil).Complete()
|
||||
return types.NewInterfaceType(methods, nil).Complete()
|
||||
}
|
||||
|
||||
// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
|
||||
|
|
@ -785,7 +785,7 @@ func (p *parser) parseType(parent *types.Package) types.Type {
|
|||
func (p *parser) parseImportDecl() {
|
||||
p.expectKeyword("import")
|
||||
name := p.parsePackageName()
|
||||
p.getPkg(p.parsePackageId(), name)
|
||||
p.getPkg(p.parsePackageID(), name)
|
||||
}
|
||||
|
||||
// int_lit = [ "+" | "-" ] { "0" ... "9" } .
|
||||
|
|
|
|||
|
|
@ -230,11 +230,11 @@ func (s PointsToSet) DynamicTypes() *typeutil.Map {
|
|||
if s.pts != nil {
|
||||
var space [50]int
|
||||
for _, x := range s.pts.AppendTo(space[:0]) {
|
||||
ifaceObjId := nodeid(x)
|
||||
if !s.a.isTaggedObject(ifaceObjId) {
|
||||
ifaceObjID := nodeid(x)
|
||||
if !s.a.isTaggedObject(ifaceObjID) {
|
||||
continue // !CanHaveDynamicTypes(tDyn)
|
||||
}
|
||||
tDyn, v, indirect := s.a.taggedValue(ifaceObjId)
|
||||
tDyn, v, indirect := s.a.taggedValue(ifaceObjID)
|
||||
if indirect {
|
||||
panic("indirect tagged object") // implement later
|
||||
}
|
||||
|
|
@ -251,13 +251,13 @@ func (s PointsToSet) DynamicTypes() *typeutil.Map {
|
|||
|
||||
// Intersects reports whether this points-to set and the
|
||||
// argument points-to set contain common members.
|
||||
func (x PointsToSet) Intersects(y PointsToSet) bool {
|
||||
if x.pts == nil || y.pts == nil {
|
||||
func (s PointsToSet) Intersects(y PointsToSet) bool {
|
||||
if s.pts == nil || y.pts == nil {
|
||||
return false
|
||||
}
|
||||
// This takes Θ(|x|+|y|) time.
|
||||
var z intsets.Sparse
|
||||
z.Intersection(&x.pts.Sparse, &y.pts.Sparse)
|
||||
z.Intersection(&s.pts.Sparse, &y.pts.Sparse)
|
||||
return !z.IsEmpty()
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -20,7 +20,7 @@ import (
|
|||
)
|
||||
|
||||
var (
|
||||
tEface = types.NewInterface(nil, nil).Complete()
|
||||
tEface = types.NewInterfaceType(nil, nil).Complete()
|
||||
tInvalid = types.Typ[types.Invalid]
|
||||
tUnsafePtr = types.Typ[types.UnsafePointer]
|
||||
)
|
||||
|
|
@ -503,8 +503,7 @@ func (a *analysis) genAppend(instr *ssa.Call, cgn *cgnode) {
|
|||
y := instr.Call.Args[1]
|
||||
tArray := sliceToArray(instr.Call.Args[0].Type())
|
||||
|
||||
var w nodeid
|
||||
w = a.nextNode()
|
||||
w := a.nextNode()
|
||||
a.addNodes(tArray, "append")
|
||||
a.endObject(w, cgn, instr)
|
||||
|
||||
|
|
|
|||
|
|
@ -391,10 +391,9 @@ func (c *storeConstraint) presolve(h *hvn) {
|
|||
if debugHVNVerbose && h.log != nil {
|
||||
fmt.Fprintf(h.log, "\to%d --> o%d\n", h.ref(odst), osrc)
|
||||
}
|
||||
} else {
|
||||
// We don't interpret store-with-offset.
|
||||
// See discussion of soundness at markIndirectNodes.
|
||||
}
|
||||
// We don't interpret store-with-offset.
|
||||
// See discussion of soundness at markIndirectNodes.
|
||||
}
|
||||
|
||||
// dst = &src.offset
|
||||
|
|
@ -785,11 +784,11 @@ func (h *hvn) simplify() {
|
|||
assert(peLabels.Len() == 1, "PE class is not a singleton")
|
||||
label := peLabel(peLabels.Min())
|
||||
|
||||
canonId := canon[label]
|
||||
if canonId == nodeid(h.N) {
|
||||
canonID := canon[label]
|
||||
if canonID == nodeid(h.N) {
|
||||
// id becomes the representative of the PE label.
|
||||
canonId = id
|
||||
canon[label] = canonId
|
||||
canonID = id
|
||||
canon[label] = canonID
|
||||
|
||||
if h.a.log != nil {
|
||||
fmt.Fprintf(h.a.log, "\tpts(n%d) is canonical : \t(%s)\n",
|
||||
|
|
@ -798,8 +797,8 @@ func (h *hvn) simplify() {
|
|||
|
||||
} else {
|
||||
// Link the solver states for the two nodes.
|
||||
assert(h.a.nodes[canonId].solve != nil, "missing solver state")
|
||||
h.a.nodes[id].solve = h.a.nodes[canonId].solve
|
||||
assert(h.a.nodes[canonID].solve != nil, "missing solver state")
|
||||
h.a.nodes[id].solve = h.a.nodes[canonID].solve
|
||||
|
||||
if h.a.log != nil {
|
||||
// TODO(adonovan): debug: reorganize the log so it prints
|
||||
|
|
@ -807,11 +806,11 @@ func (h *hvn) simplify() {
|
|||
// pe y = x1, ..., xn
|
||||
// for each canonical y. Requires allocation.
|
||||
fmt.Fprintf(h.a.log, "\tpts(n%d) = pts(n%d) : %s\n",
|
||||
id, canonId, h.a.nodes[id].typ)
|
||||
id, canonID, h.a.nodes[id].typ)
|
||||
}
|
||||
}
|
||||
|
||||
mapping[id] = canonId
|
||||
mapping[id] = canonID
|
||||
}
|
||||
|
||||
// Renumber the constraints, eliminate duplicates, and eliminate
|
||||
|
|
|
|||
|
|
@ -34,8 +34,8 @@ func (a *analysis) renumber() {
|
|||
}
|
||||
|
||||
N := nodeid(len(a.nodes))
|
||||
newNodes := make([]*node, N, N)
|
||||
renumbering := make([]nodeid, N, N) // maps old to new
|
||||
newNodes := make([]*node, N)
|
||||
renumbering := make([]nodeid, N) // maps old to new
|
||||
|
||||
var i, j nodeid
|
||||
|
||||
|
|
|
|||
|
|
@ -277,8 +277,8 @@ func (ns *nodeset) add(n nodeid) bool {
|
|||
return ns.Sparse.Insert(int(n))
|
||||
}
|
||||
|
||||
func (x *nodeset) addAll(y *nodeset) bool {
|
||||
return x.UnionWith(&y.Sparse)
|
||||
func (ns *nodeset) addAll(y *nodeset) bool {
|
||||
return ns.UnionWith(&y.Sparse)
|
||||
}
|
||||
|
||||
// Profiling & debugging -------------------------------------------------------
|
||||
|
|
|
|||
|
|
@ -58,7 +58,7 @@ var (
|
|||
tString = types.Typ[types.String]
|
||||
tUntypedNil = types.Typ[types.UntypedNil]
|
||||
tRangeIter = &opaqueType{nil, "iter"} // the type of all "range" iterators
|
||||
tEface = types.NewInterface(nil, nil).Complete()
|
||||
tEface = types.NewInterfaceType(nil, nil).Complete()
|
||||
|
||||
// SSA Value constants.
|
||||
vZero = intConst(0)
|
||||
|
|
|
|||
|
|
@ -53,7 +53,7 @@ func (a byDomPreorder) Less(i, j int) bool { return a[i].dom.pre < a[j].dom.pre
|
|||
//
|
||||
func (f *Function) DomPreorder() []*BasicBlock {
|
||||
n := len(f.Blocks)
|
||||
order := make(byDomPreorder, n, n)
|
||||
order := make(byDomPreorder, n)
|
||||
copy(order, f.Blocks)
|
||||
sort.Sort(order)
|
||||
return order
|
||||
|
|
@ -123,7 +123,7 @@ func buildDomTree(f *Function) {
|
|||
n := len(f.Blocks)
|
||||
// Allocate space for 5 contiguous [n]*BasicBlock arrays:
|
||||
// sdom, parent, ancestor, preorder, buckets.
|
||||
space := make([]*BasicBlock, 5*n, 5*n)
|
||||
space := make([]*BasicBlock, 5*n)
|
||||
lt := ltState{
|
||||
sdom: space[0:n],
|
||||
parent: space[n : 2*n],
|
||||
|
|
|
|||
|
|
@ -63,7 +63,7 @@ func noncalls() {
|
|||
Uses: make(map[*ast.Ident]types.Object),
|
||||
Selections: make(map[*ast.SelectorExpr]*types.Selection),
|
||||
}
|
||||
cfg := &types.Config{Importer: importer.For("source", nil)}
|
||||
cfg := &types.Config{Importer: importer.ForCompiler(fset, "source", nil)}
|
||||
if _, err := cfg.Check("p", fset, []*ast.File{f}, info); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -47,7 +47,7 @@ func TestMap(t *testing.T) {
|
|||
tmap.At(tPStr1)
|
||||
tmap.Delete(tPStr1)
|
||||
tmap.KeysString()
|
||||
tmap.String()
|
||||
_ = tmap.String()
|
||||
|
||||
tmap = new(typeutil.Map)
|
||||
|
||||
|
|
|
|||
|
|
@ -332,8 +332,8 @@ func hasThirdParty(list []DirEntry) bool {
|
|||
// If filter is set, only the directory entries whose paths match the filter
|
||||
// are included.
|
||||
//
|
||||
func (root *Directory) listing(skipRoot bool, filter func(string) bool) *DirList {
|
||||
if root == nil {
|
||||
func (dir *Directory) listing(skipRoot bool, filter func(string) bool) *DirList {
|
||||
if dir == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
|
|
@ -341,7 +341,7 @@ func (root *Directory) listing(skipRoot bool, filter func(string) bool) *DirList
|
|||
n := 0
|
||||
minDepth := 1 << 30 // infinity
|
||||
maxDepth := 0
|
||||
for d := range root.iter(skipRoot) {
|
||||
for d := range dir.iter(skipRoot) {
|
||||
n++
|
||||
if minDepth > d.Depth {
|
||||
minDepth = d.Depth
|
||||
|
|
@ -358,7 +358,7 @@ func (root *Directory) listing(skipRoot bool, filter func(string) bool) *DirList
|
|||
|
||||
// create list
|
||||
list := make([]DirEntry, 0, n)
|
||||
for d := range root.iter(skipRoot) {
|
||||
for d := range dir.iter(skipRoot) {
|
||||
if filter != nil && !filter(d.Path) {
|
||||
continue
|
||||
}
|
||||
|
|
@ -368,7 +368,7 @@ func (root *Directory) listing(skipRoot bool, filter func(string) bool) *DirList
|
|||
// the path is relative to root.Path - remove the root.Path
|
||||
// prefix (the prefix should always be present but avoid
|
||||
// crashes and check)
|
||||
path := strings.TrimPrefix(d.Path, root.Path)
|
||||
path := strings.TrimPrefix(d.Path, dir.Path)
|
||||
// remove leading separator if any - path must be relative
|
||||
path = strings.TrimPrefix(path, "/")
|
||||
p.Path = path
|
||||
|
|
|
|||
|
|
@ -312,9 +312,7 @@ func linkedField(line []byte, ids map[string]string) string {
|
|||
//
|
||||
// TODO: do this better, so it works for all
|
||||
// comments, including unconventional ones.
|
||||
if bytes.HasPrefix(line, commentPrefix) {
|
||||
line = line[len(commentPrefix):]
|
||||
}
|
||||
line = bytes.TrimPrefix(line, commentPrefix)
|
||||
id := scanIdentifier(line)
|
||||
if len(id) == 0 {
|
||||
// No leading identifier. Avoid map lookup for
|
||||
|
|
|
|||
|
|
@ -156,7 +156,7 @@ func Handler(target string) http.Handler {
|
|||
})
|
||||
}
|
||||
|
||||
var validId = regexp.MustCompile(`^[A-Za-z0-9-]*/?$`)
|
||||
var validID = regexp.MustCompile(`^[A-Za-z0-9-]*/?$`)
|
||||
|
||||
func PrefixHandler(prefix, baseURL string) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
|
|
@ -166,7 +166,7 @@ func PrefixHandler(prefix, baseURL string) http.Handler {
|
|||
return
|
||||
}
|
||||
id := r.URL.Path[len(prefix):]
|
||||
if !validId.MatchString(id) {
|
||||
if !validID.MatchString(id) {
|
||||
http.Error(w, "Not found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
|
@ -192,7 +192,7 @@ func clHandler(w http.ResponseWriter, r *http.Request) {
|
|||
id := r.URL.Path[len(prefix):]
|
||||
// support /cl/152700045/, which is used in commit 0edafefc36.
|
||||
id = strings.TrimSuffix(id, "/")
|
||||
if !validId.MatchString(id) {
|
||||
if !validID.MatchString(id) {
|
||||
http.Error(w, "Not found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
|
|
|||
|
|
@ -27,7 +27,7 @@ func TestStaticIsUpToDate(t *testing.T) {
|
|||
t.Errorf("error while generating static.go: %v\n", err)
|
||||
}
|
||||
|
||||
if bytes.Compare(oldBuf, newBuf) != 0 {
|
||||
if !bytes.Equal(oldBuf, newBuf) {
|
||||
t.Error(`static.go is stale. Run:
|
||||
$ go generate golang.org/x/tools/godoc/static
|
||||
$ git diff
|
||||
|
|
|
|||
|
|
@ -14,14 +14,14 @@ import (
|
|||
"sync"
|
||||
)
|
||||
|
||||
// TraverseLink is used as a return value from WalkFuncs to indicate that the
|
||||
// ErrTraverseLink is used as a return value from WalkFuncs to indicate that the
|
||||
// symlink named in the call may be traversed.
|
||||
var TraverseLink = errors.New("fastwalk: traverse symlink, assuming target is a directory")
|
||||
var ErrTraverseLink = errors.New("fastwalk: traverse symlink, assuming target is a directory")
|
||||
|
||||
// SkipFiles is a used as a return value from WalkFuncs to indicate that the
|
||||
// ErrSkipFiles is a used as a return value from WalkFuncs to indicate that the
|
||||
// callback should not be called for any other files in the current directory.
|
||||
// Child directories will still be traversed.
|
||||
var SkipFiles = errors.New("fastwalk: skip remaining files in directory")
|
||||
var ErrSkipFiles = errors.New("fastwalk: skip remaining files in directory")
|
||||
|
||||
// Walk is a faster implementation of filepath.Walk.
|
||||
//
|
||||
|
|
@ -167,7 +167,7 @@ func (w *walker) onDirEnt(dirName, baseName string, typ os.FileMode) error {
|
|||
|
||||
err := w.fn(joined, typ)
|
||||
if typ == os.ModeSymlink {
|
||||
if err == TraverseLink {
|
||||
if err == ErrTraverseLink {
|
||||
// Set callbackDone so we don't call it twice for both the
|
||||
// symlink-as-symlink and the symlink-as-directory later:
|
||||
w.enqueue(walkItem{dir: joined, callbackDone: true})
|
||||
|
|
|
|||
|
|
@ -26,7 +26,7 @@ func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) e
|
|||
continue
|
||||
}
|
||||
if err := fn(dirName, fi.Name(), fi.Mode()&os.ModeType); err != nil {
|
||||
if err == SkipFiles {
|
||||
if err == ErrSkipFiles {
|
||||
skipFiles = true
|
||||
continue
|
||||
}
|
||||
|
|
|
|||
|
|
@ -184,7 +184,7 @@ func TestFastWalk_SkipFiles(t *testing.T) {
|
|||
mu.Lock()
|
||||
defer mu.Unlock()
|
||||
want["/src/"+filepath.Base(path)] = 0
|
||||
return fastwalk.SkipFiles
|
||||
return fastwalk.ErrSkipFiles
|
||||
}
|
||||
return nil
|
||||
},
|
||||
|
|
@ -208,7 +208,7 @@ func TestFastWalk_TraverseSymlink(t *testing.T) {
|
|||
},
|
||||
func(path string, typ os.FileMode) error {
|
||||
if typ == os.ModeSymlink {
|
||||
return fastwalk.TraverseLink
|
||||
return fastwalk.ErrTraverseLink
|
||||
}
|
||||
return nil
|
||||
},
|
||||
|
|
|
|||
|
|
@ -66,7 +66,7 @@ func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) e
|
|||
continue
|
||||
}
|
||||
if err := fn(dirName, name, typ); err != nil {
|
||||
if err == SkipFiles {
|
||||
if err == ErrSkipFiles {
|
||||
skipFiles = true
|
||||
continue
|
||||
}
|
||||
|
|
|
|||
|
|
@ -189,14 +189,14 @@ func (w *walker) walk(path string, typ os.FileMode) error {
|
|||
if dir == w.root.Path && (w.root.Type == RootGOROOT || w.root.Type == RootGOPATH) {
|
||||
// Doesn't make sense to have regular files
|
||||
// directly in your $GOPATH/src or $GOROOT/src.
|
||||
return fastwalk.SkipFiles
|
||||
return fastwalk.ErrSkipFiles
|
||||
}
|
||||
if !strings.HasSuffix(path, ".go") {
|
||||
return nil
|
||||
}
|
||||
|
||||
w.add(w.root, dir)
|
||||
return fastwalk.SkipFiles
|
||||
return fastwalk.ErrSkipFiles
|
||||
}
|
||||
if typ == os.ModeDir {
|
||||
base := filepath.Base(path)
|
||||
|
|
@ -224,7 +224,7 @@ func (w *walker) walk(path string, typ os.FileMode) error {
|
|||
return nil
|
||||
}
|
||||
if w.shouldTraverse(dir, fi) {
|
||||
return fastwalk.TraverseLink
|
||||
return fastwalk.ErrTraverseLink
|
||||
}
|
||||
}
|
||||
return nil
|
||||
|
|
|
|||
|
|
@ -2665,7 +2665,7 @@ func _() {
|
|||
defer wg.Done()
|
||||
_, err := t.process("foo.com", "p/first.go", nil, nil)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
t.Error(err)
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
|
|
|||
|
|
@ -104,7 +104,7 @@ func (VersionTag) UnmarshalJSON(data []byte) error {
|
|||
return err
|
||||
}
|
||||
if version != "2.0" {
|
||||
return fmt.Errorf("Invalid RPC version %v", version)
|
||||
return fmt.Errorf("invalid RPC version %v", version)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -78,7 +78,7 @@ func Connect(config *Config) export.Exporter {
|
|||
exporter.config.Rate = 2 * time.Second
|
||||
}
|
||||
go func() {
|
||||
for _ = range time.Tick(exporter.config.Rate) {
|
||||
for range time.Tick(exporter.config.Rate) {
|
||||
exporter.Flush()
|
||||
}
|
||||
}()
|
||||
|
|
@ -170,7 +170,6 @@ func (e *exporter) send(endpoint string, message interface{}) {
|
|||
if res.Body != nil {
|
||||
res.Body.Close()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func errorInExport(message string, args ...interface{}) {
|
||||
|
|
@ -191,10 +190,10 @@ func toTruncatableString(s string) *wire.TruncatableString {
|
|||
|
||||
func convertSpan(span *telemetry.Span) *wire.Span {
|
||||
result := &wire.Span{
|
||||
TraceId: span.ID.TraceID[:],
|
||||
SpanId: span.ID.SpanID[:],
|
||||
TraceID: span.ID.TraceID[:],
|
||||
SpanID: span.ID.SpanID[:],
|
||||
TraceState: nil, //TODO?
|
||||
ParentSpanId: span.ParentID[:],
|
||||
ParentSpanID: span.ParentID[:],
|
||||
Name: toTruncatableString(span.Name),
|
||||
Kind: wire.UnspecifiedSpanKind,
|
||||
StartTime: convertTimestamp(span.Start),
|
||||
|
|
|
|||
|
|
@ -55,7 +55,7 @@ func (DoubleAttribute) tagAttribute() {}
|
|||
|
||||
type StackTrace struct {
|
||||
StackFrames *StackFrames `json:"stack_frames,omitempty"`
|
||||
StackTraceHashId uint64 `json:"stack_trace_hash_id,omitempty"`
|
||||
StackTraceHashID uint64 `json:"stack_trace_hash_id,omitempty"`
|
||||
}
|
||||
|
||||
type StackFrames struct {
|
||||
|
|
@ -75,7 +75,7 @@ type StackFrame struct {
|
|||
|
||||
type Module struct {
|
||||
Module *TruncatableString `json:"module,omitempty"`
|
||||
BuildId *TruncatableString `json:"build_id,omitempty"`
|
||||
BuildID *TruncatableString `json:"build_id,omitempty"`
|
||||
}
|
||||
|
||||
type ProcessIdentifier struct {
|
||||
|
|
|
|||
|
|
@ -11,10 +11,10 @@ type ExportTraceServiceRequest struct {
|
|||
}
|
||||
|
||||
type Span struct {
|
||||
TraceId []byte `json:"trace_id,omitempty"`
|
||||
SpanId []byte `json:"span_id,omitempty"`
|
||||
TraceID []byte `json:"trace_id,omitempty"`
|
||||
SpanID []byte `json:"span_id,omitempty"`
|
||||
TraceState *TraceState `json:"tracestate,omitempty"`
|
||||
ParentSpanId []byte `json:"parent_span_id,omitempty"`
|
||||
ParentSpanID []byte `json:"parent_span_id,omitempty"`
|
||||
Name *TruncatableString `json:"name,omitempty"`
|
||||
Kind SpanKind `json:"kind,omitempty"`
|
||||
StartTime Timestamp `json:"start_time,omitempty"`
|
||||
|
|
@ -65,7 +65,7 @@ type Annotation struct {
|
|||
|
||||
type MessageEvent struct {
|
||||
Type MessageEventType `json:"type,omitempty"`
|
||||
Id uint64 `json:"id,omitempty"`
|
||||
ID uint64 `json:"id,omitempty"`
|
||||
UncompressedSize uint64 `json:"uncompressed_size,omitempty"`
|
||||
CompressedSize uint64 `json:"compressed_size,omitempty"`
|
||||
}
|
||||
|
|
@ -91,8 +91,8 @@ type Links struct {
|
|||
}
|
||||
|
||||
type Link struct {
|
||||
TraceId []byte `json:"trace_id,omitempty"`
|
||||
SpanId []byte `json:"span_id,omitempty"`
|
||||
TraceID []byte `json:"trace_id,omitempty"`
|
||||
SpanID []byte `json:"span_id,omitempty"`
|
||||
Type LinkType `json:"type,omitempty"`
|
||||
Attributes *Attributes `json:"attributes,omitempty"`
|
||||
TraceState *TraceState `json:"tracestate,omitempty"`
|
||||
|
|
|
|||
|
|
@ -78,19 +78,19 @@ func parseInlineLink(s string) (link string, length int) {
|
|||
return
|
||||
}
|
||||
if urlEnd == end {
|
||||
simpleUrl := ""
|
||||
simpleURL := ""
|
||||
url, err := url.Parse(rawURL)
|
||||
if err == nil {
|
||||
// If the URL is http://foo.com, drop the http://
|
||||
// In other words, render [[http://golang.org]] as:
|
||||
// <a href="http://golang.org">golang.org</a>
|
||||
if strings.HasPrefix(rawURL, url.Scheme+"://") {
|
||||
simpleUrl = strings.TrimPrefix(rawURL, url.Scheme+"://")
|
||||
simpleURL = strings.TrimPrefix(rawURL, url.Scheme+"://")
|
||||
} else if strings.HasPrefix(rawURL, url.Scheme+":") {
|
||||
simpleUrl = strings.TrimPrefix(rawURL, url.Scheme+":")
|
||||
simpleURL = strings.TrimPrefix(rawURL, url.Scheme+":")
|
||||
}
|
||||
}
|
||||
return renderLink(rawURL, simpleUrl), end + 2
|
||||
return renderLink(rawURL, simpleURL), end + 2
|
||||
}
|
||||
if s[urlEnd:urlEnd+2] != "][" {
|
||||
return
|
||||
|
|
|
|||
|
|
@ -402,7 +402,7 @@ func parseSections(ctx *Context, name string, lines *Lines, number []int) ([]Sec
|
|||
}
|
||||
parser := parsers[args[0]]
|
||||
if parser == nil {
|
||||
return nil, fmt.Errorf("%s:%d: unknown command %q\n", name, lines.line, text)
|
||||
return nil, fmt.Errorf("%s:%d: unknown command %q", name, lines.line, text)
|
||||
}
|
||||
t, err := parser(ctx, name, lines.line, text)
|
||||
if err != nil {
|
||||
|
|
|
|||
Loading…
Reference in New Issue