cmd/compile: use exported field names in rulegen

The types used while generating code, such as Rule and File, have been
exported for a while. This is harmless for a main package, and lets us
easily differentiate types from variables and functions, as well as use
names like "If" since "if" is a keyword.

However, the fields remained unexported. This was a bit inconsistent,
and also meant that we couldn't use some intuitive names like If.else.
Export them.

Besides the capitalization, the only change is that the If type now has
the fields Then and Else, instead of stmt and alt.

Change-Id: I426ff140c6ca186fec394f17b29165861da5fd98
Reviewed-on: https://go-review.googlesource.com/c/go/+/228821
Reviewed-by: Josh Bleecher Snyder <josharian@gmail.com>
Run-TryBot: Daniel Martí <mvdan@mvdan.cc>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Daniel Martí 2020-04-18 14:38:09 +01:00
parent 98c6b9844b
commit f5291cf03d
2 changed files with 151 additions and 148 deletions

@@ -26,6 +26,9 @@ import (
"sync"
)
// TODO: capitalize these types, so that we can more easily tell variable names
// apart from type names, and avoid awkward func parameters like "arch arch".
type arch struct {
name string
pkg string // obj package to import for this arch.

@@ -62,12 +62,12 @@ var (
)
type Rule struct {
rule string
loc string // file name & line number
Rule string
Loc string // file name & line number
}
func (r Rule) String() string {
return fmt.Sprintf("rule %q at %s", r.rule, r.loc)
return fmt.Sprintf("rule %q at %s", r.Rule, r.Loc)
}
func normalizeSpaces(s string) string {
@@ -78,11 +78,11 @@ func normalizeSpaces(s string) string {
// parse also reports whether the generated code should use strongly typed aux and auxint fields.
func (r Rule) parse() (match, cond, result string, typed bool) {
arrow := "->"
if strings.Contains(r.rule, "=>") {
if strings.Contains(r.Rule, "=>") {
arrow = "=>"
typed = true
}
s := strings.Split(r.rule, arrow)
s := strings.Split(r.Rule, arrow)
match = normalizeSpaces(s[0])
result = normalizeSpaces(s[1])
cond = ""
@@ -145,7 +145,7 @@ func genRulesSuffix(arch arch, suff string) {
loc := fmt.Sprintf("%s%s.rules:%d", arch.name, suff, ruleLineno)
for _, rule2 := range expandOr(rule) {
r := Rule{rule: rule2, loc: loc}
r := Rule{Rule: rule2, Loc: loc}
if rawop := strings.Split(rule2, " ")[0][1:]; isBlock(rawop, arch) {
blockrules[rawop] = append(blockrules[rawop], r)
continue
@@ -173,25 +173,25 @@ func genRulesSuffix(arch arch, suff string) {
}
sort.Strings(ops)
genFile := &File{arch: arch, suffix: suff}
genFile := &File{Arch: arch, Suffix: suff}
// Main rewrite routine is a switch on v.Op.
fn := &Func{kind: "Value", arglen: -1}
fn := &Func{Kind: "Value", ArgLen: -1}
sw := &Switch{expr: exprf("v.Op")}
sw := &Switch{Expr: exprf("v.Op")}
for _, op := range ops {
eop, ok := parseEllipsisRules(oprules[op], arch)
if ok {
if strings.Contains(oprules[op][0].rule, "=>") && opByName(arch, op).aux != opByName(arch, eop).aux {
if strings.Contains(oprules[op][0].Rule, "=>") && opByName(arch, op).aux != opByName(arch, eop).aux {
panic(fmt.Sprintf("can't use ... for ops that have different aux types: %s and %s", op, eop))
}
swc := &Case{expr: exprf(op)}
swc := &Case{Expr: exprf(op)}
swc.add(stmtf("v.Op = %s", eop))
swc.add(stmtf("return true"))
sw.add(swc)
continue
}
swc := &Case{expr: exprf(op)}
swc := &Case{Expr: exprf(op)}
swc.add(stmtf("return rewriteValue%s%s_%s(v)", arch.name, suff, op))
sw.add(swc)
}
@@ -212,34 +212,34 @@ func genRulesSuffix(arch arch, suff string) {
// that the previous rule wasn't unconditional.
var rr *RuleRewrite
fn := &Func{
kind: "Value",
suffix: fmt.Sprintf("_%s", op),
arglen: opByName(arch, op).argLength,
Kind: "Value",
Suffix: fmt.Sprintf("_%s", op),
ArgLen: opByName(arch, op).argLength,
}
fn.add(declf("b", "v.Block"))
fn.add(declf("config", "b.Func.Config"))
fn.add(declf("fe", "b.Func.fe"))
fn.add(declf("typ", "&b.Func.Config.Types"))
for _, rule := range rules {
if rr != nil && !rr.canFail {
log.Fatalf("unconditional rule %s is followed by other rules", rr.match)
if rr != nil && !rr.CanFail {
log.Fatalf("unconditional rule %s is followed by other rules", rr.Match)
}
rr = &RuleRewrite{loc: rule.loc}
rr.match, rr.cond, rr.result, rr.typed = rule.parse()
pos, _ := genMatch(rr, arch, rr.match, fn.arglen >= 0)
rr = &RuleRewrite{Loc: rule.Loc}
rr.Match, rr.Cond, rr.Result, rr.Typed = rule.parse()
pos, _ := genMatch(rr, arch, rr.Match, fn.ArgLen >= 0)
if pos == "" {
pos = "v.Pos"
}
if rr.cond != "" {
rr.add(breakf("!(%s)", rr.cond))
if rr.Cond != "" {
rr.add(breakf("!(%s)", rr.Cond))
}
genResult(rr, arch, rr.result, pos)
genResult(rr, arch, rr.Result, pos)
if *genLog {
rr.add(stmtf("logRule(%q)", rule.loc))
rr.add(stmtf("logRule(%q)", rule.Loc))
}
fn.add(rr)
}
if rr.canFail {
if rr.CanFail {
fn.add(stmtf("return false"))
}
genFile.add(fn)
@@ -247,11 +247,11 @@ func genRulesSuffix(arch arch, suff string) {
// Generate block rewrite function. There are only a few block types
// so we can make this one function with a switch.
fn = &Func{kind: "Block"}
fn = &Func{Kind: "Block"}
fn.add(declf("config", "b.Func.Config"))
fn.add(declf("typ", "&b.Func.Config.Types"))
sw = &Switch{expr: exprf("b.Kind")}
sw = &Switch{Expr: exprf("b.Kind")}
ops = ops[:0]
for op := range blockrules {
ops = append(ops, op)
@@ -259,7 +259,7 @@ func genRulesSuffix(arch arch, suff string) {
sort.Strings(ops)
for _, op := range ops {
name, data := getBlockInfo(op, arch)
swc := &Case{expr: exprf("%s", name)}
swc := &Case{Expr: exprf("%s", name)}
for _, rule := range blockrules[op] {
swc.add(genBlockRewrite(rule, arch, data))
}
@@ -575,7 +575,7 @@ func fprint(w io.Writer, n Node) {
case *File:
file := n
seenRewrite := make(map[[3]string]string)
fmt.Fprintf(w, "// Code generated from gen/%s%s.rules; DO NOT EDIT.\n", n.arch.name, n.suffix)
fmt.Fprintf(w, "// Code generated from gen/%s%s.rules; DO NOT EDIT.\n", n.Arch.name, n.Suffix)
fmt.Fprintf(w, "// generated with: cd gen; go run *.go\n")
fmt.Fprintf(w, "\npackage ssa\n")
for _, path := range append([]string{
@@ -584,31 +584,31 @@ func fprint(w io.Writer, n Node) {
"cmd/internal/obj",
"cmd/internal/objabi",
"cmd/compile/internal/types",
}, n.arch.imports...) {
}, n.Arch.imports...) {
fmt.Fprintf(w, "import %q\n", path)
}
for _, f := range n.list {
for _, f := range n.List {
f := f.(*Func)
fmt.Fprintf(w, "func rewrite%s%s%s%s(", f.kind, n.arch.name, n.suffix, f.suffix)
fmt.Fprintf(w, "%c *%s) bool {\n", strings.ToLower(f.kind)[0], f.kind)
if f.kind == "Value" && f.arglen > 0 {
for i := f.arglen - 1; i >= 0; i-- {
fmt.Fprintf(w, "func rewrite%s%s%s%s(", f.Kind, n.Arch.name, n.Suffix, f.Suffix)
fmt.Fprintf(w, "%c *%s) bool {\n", strings.ToLower(f.Kind)[0], f.Kind)
if f.Kind == "Value" && f.ArgLen > 0 {
for i := f.ArgLen - 1; i >= 0; i-- {
fmt.Fprintf(w, "v_%d := v.Args[%d]\n", i, i)
}
}
for _, n := range f.list {
for _, n := range f.List {
fprint(w, n)
if rr, ok := n.(*RuleRewrite); ok {
k := [3]string{
normalizeMatch(rr.match, file.arch),
normalizeWhitespace(rr.cond),
normalizeWhitespace(rr.result),
normalizeMatch(rr.Match, file.Arch),
normalizeWhitespace(rr.Cond),
normalizeWhitespace(rr.Result),
}
if prev, ok := seenRewrite[k]; ok {
log.Fatalf("duplicate rule %s, previously seen at %s\n", rr.loc, prev)
log.Fatalf("duplicate rule %s, previously seen at %s\n", rr.Loc, prev)
} else {
seenRewrite[k] = rr.loc
seenRewrite[k] = rr.Loc
}
}
}
@@ -616,47 +616,47 @@ func fprint(w io.Writer, n Node) {
}
case *Switch:
fmt.Fprintf(w, "switch ")
fprint(w, n.expr)
fprint(w, n.Expr)
fmt.Fprintf(w, " {\n")
for _, n := range n.list {
for _, n := range n.List {
fprint(w, n)
}
fmt.Fprintf(w, "}\n")
case *If:
fmt.Fprintf(w, "if ")
fprint(w, n.expr)
fprint(w, n.Cond)
fmt.Fprintf(w, " {\n")
fprint(w, n.stmt)
if n.alt != nil {
fprint(w, n.Then)
if n.Else != nil {
fmt.Fprintf(w, "} else {\n")
fprint(w, n.alt)
fprint(w, n.Else)
}
fmt.Fprintf(w, "}\n")
case *Case:
fmt.Fprintf(w, "case ")
fprint(w, n.expr)
fprint(w, n.Expr)
fmt.Fprintf(w, ":\n")
for _, n := range n.list {
for _, n := range n.List {
fprint(w, n)
}
case *RuleRewrite:
if *addLine {
fmt.Fprintf(w, "// %s\n", n.loc)
fmt.Fprintf(w, "// %s\n", n.Loc)
}
fmt.Fprintf(w, "// match: %s\n", n.match)
if n.cond != "" {
fmt.Fprintf(w, "// cond: %s\n", n.cond)
fmt.Fprintf(w, "// match: %s\n", n.Match)
if n.Cond != "" {
fmt.Fprintf(w, "// cond: %s\n", n.Cond)
}
fmt.Fprintf(w, "// result: %s\n", n.result)
fmt.Fprintf(w, "for %s {\n", n.check)
fmt.Fprintf(w, "// result: %s\n", n.Result)
fmt.Fprintf(w, "for %s {\n", n.Check)
nCommutative := 0
for _, n := range n.list {
for _, n := range n.List {
if b, ok := n.(*CondBreak); ok {
b.insideCommuteLoop = nCommutative > 0
b.InsideCommuteLoop = nCommutative > 0
}
fprint(w, n)
if loop, ok := n.(StartCommuteLoop); ok {
if nCommutative != loop.depth {
if nCommutative != loop.Depth {
panic("mismatch commute loop depth")
}
nCommutative++
@@ -666,19 +666,19 @@ func fprint(w io.Writer, n Node) {
for i := 0; i < nCommutative; i++ {
fmt.Fprintln(w, "}")
}
if n.commuteDepth > 0 && n.canFail {
if n.CommuteDepth > 0 && n.CanFail {
fmt.Fprint(w, "break\n")
}
fmt.Fprintf(w, "}\n")
case *Declare:
fmt.Fprintf(w, "%s := ", n.name)
fprint(w, n.value)
fmt.Fprintf(w, "%s := ", n.Name)
fprint(w, n.Value)
fmt.Fprintln(w)
case *CondBreak:
fmt.Fprintf(w, "if ")
fprint(w, n.expr)
fprint(w, n.Cond)
fmt.Fprintf(w, " {\n")
if n.insideCommuteLoop {
if n.InsideCommuteLoop {
fmt.Fprintf(w, "continue")
} else {
fmt.Fprintf(w, "break")
@@ -690,7 +690,7 @@ func fprint(w io.Writer, n Node) {
fmt.Fprintln(w)
}
case StartCommuteLoop:
fmt.Fprintf(w, "for _i%[1]d := 0; _i%[1]d <= 1; _i%[1]d, %[2]s_0, %[2]s_1 = _i%[1]d + 1, %[2]s_1, %[2]s_0 {\n", n.depth, n.v)
fmt.Fprintf(w, "for _i%[1]d := 0; _i%[1]d <= 1; _i%[1]d, %[2]s_0, %[2]s_1 = _i%[1]d + 1, %[2]s_1, %[2]s_0 {\n", n.Depth, n.V)
default:
log.Fatalf("cannot print %T", n)
}
@@ -709,33 +709,33 @@ type Node interface{}
// ast.Stmt under some limited circumstances.
type Statement interface{}
// bodyBase is shared by all of our statement pseudo-node types which can
// BodyBase is shared by all of our statement pseudo-node types which can
// contain other statements.
type bodyBase struct {
list []Statement
canFail bool
type BodyBase struct {
List []Statement
CanFail bool
}
func (w *bodyBase) add(node Statement) {
func (w *BodyBase) add(node Statement) {
var last Statement
if len(w.list) > 0 {
last = w.list[len(w.list)-1]
if len(w.List) > 0 {
last = w.List[len(w.List)-1]
}
if node, ok := node.(*CondBreak); ok {
w.canFail = true
w.CanFail = true
if last, ok := last.(*CondBreak); ok {
// Add to the previous "if <cond> { break }" via a
// logical OR, which will save verbosity.
last.expr = &ast.BinaryExpr{
last.Cond = &ast.BinaryExpr{
Op: token.LOR,
X: last.expr,
Y: node.expr,
X: last.Cond,
Y: node.Cond,
}
return
}
}
w.list = append(w.list, node)
w.List = append(w.List, node)
}
// predeclared contains globally known tokens that should not be redefined.
@@ -746,15 +746,15 @@ var predeclared = map[string]bool{
}
// declared reports if the body contains a Declare with the given name.
func (w *bodyBase) declared(name string) bool {
func (w *BodyBase) declared(name string) bool {
if predeclared[name] {
// Treat predeclared names as having already been declared.
// This lets us use nil to match an aux field or
// true and false to match an auxint field.
return true
}
for _, s := range w.list {
if decl, ok := s.(*Declare); ok && decl.name == name {
for _, s := range w.List {
if decl, ok := s.(*Declare); ok && decl.Name == name {
return true
}
}
@@ -769,51 +769,51 @@ func (w *bodyBase) declared(name string) bool {
// nodes.
type (
File struct {
bodyBase // []*Func
arch arch
suffix string
BodyBase // []*Func
Arch arch
Suffix string
}
Func struct {
bodyBase
kind string // "Value" or "Block"
suffix string
arglen int32 // if kind == "Value", number of args for this op
BodyBase
Kind string // "Value" or "Block"
Suffix string
ArgLen int32 // if kind == "Value", number of args for this op
}
If struct {
expr ast.Expr
stmt Statement
alt Statement
Cond ast.Expr
Then Statement
Else Statement
}
Switch struct {
bodyBase // []*Case
expr ast.Expr
BodyBase // []*Case
Expr ast.Expr
}
Case struct {
bodyBase
expr ast.Expr
BodyBase
Expr ast.Expr
}
RuleRewrite struct {
bodyBase
match, cond, result string // top comments
check string // top-level boolean expression
BodyBase
Match, Cond, Result string // top comments
Check string // top-level boolean expression
alloc int // for unique var names
loc string // file name & line number of the original rule
commuteDepth int // used to track depth of commute loops
typed bool // aux and auxint fields should be strongly typed
Alloc int // for unique var names
Loc string // file name & line number of the original rule
CommuteDepth int // used to track depth of commute loops
Typed bool // aux and auxint fields should be strongly typed
}
Declare struct {
name string
value ast.Expr
Name string
Value ast.Expr
}
// TODO: implement CondBreak as If + Break instead?
CondBreak struct {
expr ast.Expr
insideCommuteLoop bool
Cond ast.Expr
InsideCommuteLoop bool
}
StartCommuteLoop struct {
depth int
v string
Depth int
V string
}
)
@@ -850,13 +850,13 @@ func declf(name, format string, a ...interface{}) *Declare {
// breakf constructs a simple "if cond { break }" statement, using exprf for its
// condition.
func breakf(format string, a ...interface{}) *CondBreak {
return &CondBreak{expr: exprf(format, a...)}
return &CondBreak{Cond: exprf(format, a...)}
}
func genBlockRewrite(rule Rule, arch arch, data blockData) *RuleRewrite {
rr := &RuleRewrite{loc: rule.loc}
rr.match, rr.cond, rr.result, rr.typed = rule.parse()
_, _, auxint, aux, s := extract(rr.match) // remove parens, then split
rr := &RuleRewrite{Loc: rule.Loc}
rr.Match, rr.Cond, rr.Result, rr.Typed = rule.parse()
_, _, auxint, aux, s := extract(rr.Match) // remove parens, then split
// check match of control values
if len(s) < data.controls {
@@ -873,10 +873,10 @@ func genBlockRewrite(rule Rule, arch arch, data blockData) *RuleRewrite {
p, op := genMatch0(rr, arch, arg, vname, nil, false) // TODO: pass non-nil cnt?
if op != "" {
check := fmt.Sprintf("%s.Op == %s", cname, op)
if rr.check == "" {
rr.check = check
if rr.Check == "" {
rr.Check = check
} else {
rr.check = rr.check + " && " + check
rr.Check = rr.Check + " && " + check
}
}
if p == "" {
@@ -897,7 +897,7 @@ func genBlockRewrite(rule Rule, arch arch, data blockData) *RuleRewrite {
if e.name == "" {
continue
}
if !rr.typed {
if !rr.Typed {
if !token.IsIdentifier(e.name) || rr.declared(e.name) {
// code or variable
rr.add(breakf("b.%s != %s", e.field, e.name))
@@ -916,12 +916,12 @@ func genBlockRewrite(rule Rule, arch arch, data blockData) *RuleRewrite {
rr.add(declf(e.name, "%sTo%s(b.%s)", unTitle(e.field), title(e.dclType), e.field))
}
}
if rr.cond != "" {
rr.add(breakf("!(%s)", rr.cond))
if rr.Cond != "" {
rr.add(breakf("!(%s)", rr.Cond))
}
// Rule matches. Generate result.
outop, _, auxint, aux, t := extract(rr.result) // remove parens, then split
outop, _, auxint, aux, t := extract(rr.Result) // remove parens, then split
blockName, outdata := getBlockInfo(outop, arch)
if len(t) < outdata.controls {
log.Fatalf("incorrect number of output arguments in %s, got %v wanted at least %v", rule, len(s), outdata.controls)
@@ -974,7 +974,7 @@ func genBlockRewrite(rule Rule, arch arch, data blockData) *RuleRewrite {
}
if auxint != "" {
if rr.typed {
if rr.Typed {
// Make sure auxint value has the right type.
rr.add(stmtf("b.AuxInt = %sToAuxInt(%s)", unTitle(outdata.auxIntType()), auxint))
} else {
@@ -982,7 +982,7 @@ func genBlockRewrite(rule Rule, arch arch, data blockData) *RuleRewrite {
}
}
if aux != "" {
if rr.typed {
if rr.Typed {
// Make sure aux value has the right type.
rr.add(stmtf("b.Aux = %sToAux(%s)", unTitle(outdata.auxType()), aux))
} else {
@@ -1007,7 +1007,7 @@ func genBlockRewrite(rule Rule, arch arch, data blockData) *RuleRewrite {
}
if *genLog {
rr.add(stmtf("logRule(%q)", rule.loc))
rr.add(stmtf("logRule(%q)", rule.Loc))
}
return rr
}
@@ -1021,9 +1021,9 @@ func genMatch(rr *RuleRewrite, arch arch, match string, pregenTop bool) (pos, ch
func genMatch0(rr *RuleRewrite, arch arch, match, v string, cnt map[string]int, pregenTop bool) (pos, checkOp string) {
if match[0] != '(' || match[len(match)-1] != ')' {
log.Fatalf("%s: non-compound expr in genMatch0: %q", rr.loc, match)
log.Fatalf("%s: non-compound expr in genMatch0: %q", rr.Loc, match)
}
op, oparch, typ, auxint, aux, args := parseValue(match, arch, rr.loc)
op, oparch, typ, auxint, aux, args := parseValue(match, arch, rr.Loc)
checkOp = fmt.Sprintf("Op%s%s", oparch, op.name)
@@ -1042,7 +1042,7 @@ func genMatch0(rr *RuleRewrite, arch arch, match, v string, cnt map[string]int,
if e.name == "" {
continue
}
if !rr.typed {
if !rr.Typed {
if !token.IsIdentifier(e.name) || rr.declared(e.name) {
// code or variable
rr.add(breakf("%s.%s != %s", v, e.field, e.name))
@@ -1118,9 +1118,9 @@ func genMatch0(rr *RuleRewrite, arch arch, match, v string, cnt map[string]int,
}
var commuteDepth int
if commutative {
commuteDepth = rr.commuteDepth
commuteDepth = rr.CommuteDepth
rr.add(StartCommuteLoop{commuteDepth, v})
rr.commuteDepth++
rr.CommuteDepth++
}
for i, arg := range args {
if arg == "_" {
@@ -1160,7 +1160,7 @@ func genMatch0(rr *RuleRewrite, arch arch, match, v string, cnt map[string]int,
rr.add(declf(argname, "%s", rhs))
}
bexpr := exprf("%s.Op != addLater", argname)
rr.add(&CondBreak{expr: bexpr})
rr.add(&CondBreak{Cond: bexpr})
argPos, argCheckOp := genMatch0(rr, arch, expr, argname, cnt, false)
bexpr.(*ast.BinaryExpr).Y.(*ast.Ident).Name = argCheckOp
@@ -1205,7 +1205,7 @@ func genResult0(rr *RuleRewrite, arch arch, result string, top, move bool, pos s
return result
}
op, oparch, typ, auxint, aux, args := parseValue(result, arch, rr.loc)
op, oparch, typ, auxint, aux, args := parseValue(result, arch, rr.Loc)
// Find the type of the variable.
typeOverride := typ != ""
@@ -1221,10 +1221,10 @@ func genResult0(rr *RuleRewrite, arch arch, result string, top, move bool, pos s
}
} else {
if typ == "" {
log.Fatalf("sub-expression %s (op=Op%s%s) at %s must have a type", result, oparch, op.name, rr.loc)
log.Fatalf("sub-expression %s (op=Op%s%s) at %s must have a type", result, oparch, op.name, rr.Loc)
}
v = fmt.Sprintf("v%d", rr.alloc)
rr.alloc++
v = fmt.Sprintf("v%d", rr.Alloc)
rr.Alloc++
rr.add(declf(v, "b.NewValue0(%s, Op%s%s, %s)", pos, oparch, op.name, typ))
if move && top {
// Rewrite original into a copy
@@ -1233,7 +1233,7 @@ func genResult0(rr *RuleRewrite, arch arch, result string, top, move bool, pos s
}
if auxint != "" {
if rr.typed {
if rr.Typed {
// Make sure auxint value has the right type.
rr.add(stmtf("%s.AuxInt = %sToAuxInt(%s)", v, unTitle(op.auxIntType()), auxint))
} else {
@@ -1241,7 +1241,7 @@ func genResult0(rr *RuleRewrite, arch arch, result string, top, move bool, pos s
}
}
if aux != "" {
if rr.typed {
if rr.Typed {
// Make sure aux value has the right type.
rr.add(stmtf("%s.Aux = %sToAux(%s)", v, unTitle(op.auxType()), aux))
} else {
@@ -1570,14 +1570,14 @@ func expandOr(r string) []string {
}
// varCount returns a map which counts the number of occurrences of
// Value variables in the s-expression rr.match and the Go expression rr.cond.
// Value variables in the s-expression rr.Match and the Go expression rr.Cond.
func varCount(rr *RuleRewrite) map[string]int {
cnt := map[string]int{}
varCount1(rr.loc, rr.match, cnt)
if rr.cond != "" {
expr, err := parser.ParseExpr(rr.cond)
varCount1(rr.Loc, rr.Match, cnt)
if rr.Cond != "" {
expr, err := parser.ParseExpr(rr.Cond)
if err != nil {
log.Fatalf("%s: failed to parse cond %q: %v", rr.loc, rr.cond, err)
log.Fatalf("%s: failed to parse cond %q: %v", rr.Loc, rr.Cond, err)
}
ast.Inspect(expr, func(n ast.Node) bool {
if id, ok := n.(*ast.Ident); ok {
@@ -1668,8 +1668,8 @@ func normalizeMatch(m string, arch arch) string {
func parseEllipsisRules(rules []Rule, arch arch) (newop string, ok bool) {
if len(rules) != 1 {
for _, r := range rules {
if strings.Contains(r.rule, "...") {
log.Fatalf("%s: found ellipsis in rule, but there are other rules with the same op", r.loc)
if strings.Contains(r.Rule, "...") {
log.Fatalf("%s: found ellipsis in rule, but there are other rules with the same op", r.Loc)
}
}
return "", false
@@ -1677,13 +1677,13 @@ func parseEllipsisRules(rules []Rule, arch arch) (newop string, ok bool) {
rule := rules[0]
match, cond, result, _ := rule.parse()
if cond != "" || !isEllipsisValue(match) || !isEllipsisValue(result) {
if strings.Contains(rule.rule, "...") {
log.Fatalf("%s: found ellipsis in non-ellipsis rule", rule.loc)
if strings.Contains(rule.Rule, "...") {
log.Fatalf("%s: found ellipsis in non-ellipsis rule", rule.Loc)
}
checkEllipsisRuleCandidate(rule, arch)
return "", false
}
op, oparch, _, _, _, _ := parseValue(result, arch, rule.loc)
op, oparch, _, _, _, _ := parseValue(result, arch, rule.Loc)
return fmt.Sprintf("Op%s%s", oparch, op.name), true
}
@@ -1704,7 +1704,7 @@ func checkEllipsisRuleCandidate(rule Rule, arch arch) {
if cond != "" {
return
}
op, _, _, auxint, aux, args := parseValue(match, arch, rule.loc)
op, _, _, auxint, aux, args := parseValue(match, arch, rule.Loc)
var auxint2, aux2 string
var args2 []string
var usingCopy string
@@ -1714,13 +1714,13 @@ func checkEllipsisRuleCandidate(rule Rule, arch arch) {
args2 = []string{result}
usingCopy = " using Copy"
} else {
eop, _, _, auxint2, aux2, args2 = parseValue(result, arch, rule.loc)
eop, _, _, auxint2, aux2, args2 = parseValue(result, arch, rule.Loc)
}
// Check that all restrictions in match are reproduced exactly in result.
if aux != aux2 || auxint != auxint2 || len(args) != len(args2) {
return
}
if strings.Contains(rule.rule, "=>") && op.aux != eop.aux {
if strings.Contains(rule.Rule, "=>") && op.aux != eop.aux {
return
}
for i := range args {
@@ -1730,11 +1730,11 @@ func checkEllipsisRuleCandidate(rule Rule, arch arch) {
}
switch {
case opHasAux(op) && aux == "" && aux2 == "":
fmt.Printf("%s: rule silently zeros aux, either copy aux or explicitly zero\n", rule.loc)
fmt.Printf("%s: rule silently zeros aux, either copy aux or explicitly zero\n", rule.Loc)
case opHasAuxInt(op) && auxint == "" && auxint2 == "":
fmt.Printf("%s: rule silently zeros auxint, either copy auxint or explicitly zero\n", rule.loc)
fmt.Printf("%s: rule silently zeros auxint, either copy auxint or explicitly zero\n", rule.Loc)
default:
fmt.Printf("%s: possible ellipsis rule candidate%s: %q\n", rule.loc, usingCopy, rule.rule)
fmt.Printf("%s: possible ellipsis rule candidate%s: %q\n", rule.Loc, usingCopy, rule.Rule)
}
}