diff --git a/src/cmd/compile/internal/gc/const.go b/src/cmd/compile/internal/gc/const.go
index 4a61c77630..8771d82cfa 100644
--- a/src/cmd/compile/internal/gc/const.go
+++ b/src/cmd/compile/internal/gc/const.go
@@ -526,7 +526,7 @@ func evalConst(n ir.Node) ir.Node {
 		if need == 1 {
 			var strs []string
 			for _, c := range s {
-				strs = append(strs, c.StringVal())
+				strs = append(strs, ir.StringVal(c))
 			}
 			return origConst(n, constant.MakeString(strings.Join(strs, "")))
 		}
@@ -537,7 +537,7 @@ func evalConst(n ir.Node) ir.Node {
 				var strs []string
 				i2 := i
 				for i2 < len(s) && ir.IsConst(s[i2], constant.String) {
-					strs = append(strs, s[i2].StringVal())
+					strs = append(strs, ir.StringVal(s[i2]))
 					i2++
 				}
 
@@ -558,7 +558,7 @@ func evalConst(n ir.Node) ir.Node {
 		switch nl.Type().Kind() {
 		case types.TSTRING:
 			if ir.IsConst(nl, constant.String) {
-				return origIntConst(n, int64(len(nl.StringVal())))
+				return origIntConst(n, int64(len(ir.StringVal(nl))))
 			}
 		case types.TARRAY:
 			if !hascallchan(nl) {
@@ -780,7 +780,7 @@ func indexconst(n ir.Node) int64 {
 	if doesoverflow(v, types.Types[types.TINT]) {
 		return -2
 	}
-	return ir.Int64Val(types.Types[types.TINT], v)
+	return ir.IntVal(types.Types[types.TINT], v)
 }
 
 // isGoConst reports whether n is a Go language constant (as opposed to a
diff --git a/src/cmd/compile/internal/gc/escape.go b/src/cmd/compile/internal/gc/escape.go
index 9fc3dd2778..622edb9820 100644
--- a/src/cmd/compile/internal/gc/escape.go
+++ b/src/cmd/compile/internal/gc/escape.go
@@ -1769,7 +1769,7 @@ func heapAllocReason(n ir.Node) string {
 		if !smallintconst(r) {
 			return "non-constant size"
 		}
-		if t := n.Type(); t.Elem().Width != 0 && r.Int64Val() >= maxImplicitStackVarSize/t.Elem().Width {
+		if t := n.Type(); t.Elem().Width != 0 && ir.Int64Val(r) >= maxImplicitStackVarSize/t.Elem().Width {
 			return "too large for stack"
 		}
 	}
diff --git a/src/cmd/compile/internal/gc/noder.go b/src/cmd/compile/internal/gc/noder.go
index 9352463f18..61320123a8 100644
--- a/src/cmd/compile/internal/gc/noder.go
+++ b/src/cmd/compile/internal/gc/noder.go
@@ -807,7 +807,7 @@ func (p *noder) sum(x syntax.Expr) ir.Node {
 	n := p.expr(x)
 	if ir.IsConst(n, constant.String) && n.Sym() == nil {
 		nstr = n
-		chunks = append(chunks, nstr.StringVal())
+		chunks = append(chunks, ir.StringVal(nstr))
 	}
 
 	for i := len(adds) - 1; i >= 0; i-- {
@@ -817,12 +817,12 @@ func (p *noder) sum(x syntax.Expr) ir.Node {
 		if ir.IsConst(r, constant.String) && r.Sym() == nil {
 			if nstr != nil {
 				// Collapse r into nstr instead of adding to n.
-				chunks = append(chunks, r.StringVal())
+				chunks = append(chunks, ir.StringVal(r))
 				continue
 			}
 
 			nstr = r
-			chunks = append(chunks, nstr.StringVal())
+			chunks = append(chunks, ir.StringVal(nstr))
 		} else {
 			if len(chunks) > 1 {
 				nstr.SetVal(constant.MakeString(strings.Join(chunks, "")))
diff --git a/src/cmd/compile/internal/gc/obj.go b/src/cmd/compile/internal/gc/obj.go
index 21a50257b8..b1701b30a1 100644
--- a/src/cmd/compile/internal/gc/obj.go
+++ b/src/cmd/compile/internal/gc/obj.go
@@ -263,7 +263,7 @@ func dumpGlobalConst(n ir.Node) {
 			return
 		}
 	}
-	base.Ctxt.DwarfIntConst(base.Ctxt.Pkgpath, n.Sym().Name, typesymname(t), ir.Int64Val(t, v))
+	base.Ctxt.DwarfIntConst(base.Ctxt.Pkgpath, n.Sym().Name, typesymname(t), ir.IntVal(t, v))
 }
 
 func dumpglobls() {
@@ -598,7 +598,7 @@ func litsym(n, c ir.Node, wid int) {
 		s.WriteInt(base.Ctxt, n.Offset(), wid, i)
 
 	case constant.Int:
-		s.WriteInt(base.Ctxt, n.Offset(), wid, ir.Int64Val(n.Type(), u))
+		s.WriteInt(base.Ctxt, n.Offset(), wid, ir.IntVal(n.Type(), u))
 
 	case constant.Float:
 		f, _ := constant.Float64Val(u)
diff --git a/src/cmd/compile/internal/gc/order.go b/src/cmd/compile/internal/gc/order.go
index e4175bbf36..5440806e8e 100644
--- a/src/cmd/compile/internal/gc/order.go
+++ b/src/cmd/compile/internal/gc/order.go
@@ -1107,7 +1107,7 @@ func (o *Order) expr(n, lhs ir.Node) ir.Node {
 		haslit := false
 		for _, n1 := range n.List().Slice() {
 			hasbyte = hasbyte || n1.Op() == ir.OBYTES2STR
-			haslit = haslit || n1.Op() == ir.OLITERAL && len(n1.StringVal()) != 0
+			haslit = haslit || n1.Op() == ir.OLITERAL && len(ir.StringVal(n1)) != 0
 		}
 
 		if haslit && hasbyte {
@@ -1278,7 +1278,7 @@ func (o *Order) expr(n, lhs ir.Node) ir.Node {
 		var t *types.Type
 		switch n.Op() {
 		case ir.OSLICELIT:
-			t = types.NewArray(n.Type().Elem(), n.Right().Int64Val())
+			t = types.NewArray(n.Type().Elem(), ir.Int64Val(n.Right()))
 		case ir.OCALLPART:
 			t = partialCallType(n)
 		}
diff --git a/src/cmd/compile/internal/gc/sinit.go b/src/cmd/compile/internal/gc/sinit.go
index 2dc4281857..3ef976d8aa 100644
--- a/src/cmd/compile/internal/gc/sinit.go
+++ b/src/cmd/compile/internal/gc/sinit.go
@@ -134,7 +134,7 @@ func (s *InitSchedule) staticcopy(l ir.Node, r ir.Node) bool {
 	case ir.OSLICELIT:
 		// copy slice
 		a := s.inittemps[r]
-		slicesym(l, a, r.Right().Int64Val())
+		slicesym(l, a, ir.Int64Val(r.Right()))
 		return true
 
 	case ir.OARRAYLIT, ir.OSTRUCTLIT:
@@ -213,7 +213,7 @@ func (s *InitSchedule) staticassign(l ir.Node, r ir.Node) bool {
 
 	case ir.OSTR2BYTES:
 		if l.Class() == ir.PEXTERN && r.Left().Op() == ir.OLITERAL {
-			sval := r.Left().StringVal()
+			sval := ir.StringVal(r.Left())
 			slicebytes(l, sval)
 			return true
 		}
@@ -221,7 +221,7 @@ func (s *InitSchedule) staticassign(l ir.Node, r ir.Node) bool {
 	case ir.OSLICELIT:
 		s.initplan(r)
 		// Init slice.
-		bound := r.Right().Int64Val()
+		bound := ir.Int64Val(r.Right())
 		ta := types.NewArray(r.Type().Elem(), bound)
 		ta.SetNoalg(true)
 		a := staticname(ta)
@@ -418,7 +418,7 @@ func getdyn(n ir.Node, top bool) initGenType {
 		if !top {
 			return initDynamic
 		}
-		if n.Right().Int64Val()/4 > int64(n.List().Len()) {
+		if ir.Int64Val(n.Right())/4 > int64(n.List().Len()) {
 			// <25% of entries have explicit values.
 			// Very rough estimation, it takes 4 bytes of instructions
 			// to initialize 1 byte of result. So don't use a static
@@ -594,12 +594,12 @@ func isSmallSliceLit(n ir.Node) bool {
 
 	r := n.Right()
 
-	return smallintconst(r) && (n.Type().Elem().Width == 0 || r.Int64Val() <= smallArrayBytes/n.Type().Elem().Width)
+	return smallintconst(r) && (n.Type().Elem().Width == 0 || ir.Int64Val(r) <= smallArrayBytes/n.Type().Elem().Width)
 }
 
 func slicelit(ctxt initContext, n ir.Node, var_ ir.Node, init *ir.Nodes) {
 	// make an array type corresponding the number of elements we have
-	t := types.NewArray(n.Type().Elem(), n.Right().Int64Val())
+	t := types.NewArray(n.Type().Elem(), ir.Int64Val(n.Right()))
 	dowidth(t)
 
 	if ctxt == inNonInitFunction {
@@ -997,7 +997,7 @@ func oaslit(n ir.Node, init *ir.Nodes) bool {
 
 func getlit(lit ir.Node) int {
 	if smallintconst(lit) {
-		return int(lit.Int64Val())
+		return int(ir.Int64Val(lit))
 	}
 	return -1
 }
diff --git a/src/cmd/compile/internal/gc/ssa.go b/src/cmd/compile/internal/gc/ssa.go
index d53bd1aa4f..89918e2133 100644
--- a/src/cmd/compile/internal/gc/ssa.go
+++ b/src/cmd/compile/internal/gc/ssa.go
@@ -1271,7 +1271,7 @@ func (s *state) stmt(n ir.Node) {
 			// We're assigning a slicing operation back to its source.
 			// Don't write back fields we aren't changing. See issue #14855.
 			i, j, k := rhs.SliceBounds()
-			if i != nil && (i.Op() == ir.OLITERAL && i.Val().Kind() == constant.Int && i.Int64Val() == 0) {
+			if i != nil && (i.Op() == ir.OLITERAL && i.Val().Kind() == constant.Int && ir.Int64Val(i) == 0) {
 				// [0:...] is the same as [:...]
 				i = nil
 			}
@@ -1301,7 +1301,7 @@ func (s *state) stmt(n ir.Node) {
 	case ir.OIF:
 		if ir.IsConst(n.Left(), constant.Bool) {
 			s.stmtList(n.Left().Init())
-			if n.Left().BoolVal() {
+			if ir.BoolVal(n.Left()) {
 				s.stmtList(n.Body())
 			} else {
 				s.stmtList(n.Rlist())
@@ -2041,7 +2041,7 @@ func (s *state) expr(n ir.Node) *ssa.Value {
 	case ir.OLITERAL:
 		switch u := n.Val(); u.Kind() {
 		case constant.Int:
-			i := ir.Int64Val(n.Type(), u)
+			i := ir.IntVal(n.Type(), u)
 			switch n.Type().Size() {
 			case 1:
 				return s.constInt8(n.Type(), int8(i))
@@ -2624,7 +2624,7 @@ func (s *state) expr(n ir.Node) *ssa.Value {
 			// Replace "abc"[1] with 'b'.
 			// Delayed until now because "abc"[1] is not an ideal constant.
 			// See test/fixedbugs/issue11370.go.
-			return s.newValue0I(ssa.OpConst8, types.Types[types.TUINT8], int64(int8(n.Left().StringVal()[n.Right().Int64Val()])))
+			return s.newValue0I(ssa.OpConst8, types.Types[types.TUINT8], int64(int8(ir.StringVal(n.Left())[ir.Int64Val(n.Right())])))
 		}
 		a := s.expr(n.Left())
 		i := s.expr(n.Right())
@@ -2633,7 +2633,7 @@ func (s *state) expr(n ir.Node) *ssa.Value {
 		ptrtyp := s.f.Config.Types.BytePtr
 		ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
 		if ir.IsConst(n.Right(), constant.Int) {
-			ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, n.Right().Int64Val(), ptr)
+			ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, ir.Int64Val(n.Right()), ptr)
 		} else {
 			ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
 		}
diff --git a/src/cmd/compile/internal/gc/swt.go b/src/cmd/compile/internal/gc/swt.go
index 30179e1dd6..e241721588 100644
--- a/src/cmd/compile/internal/gc/swt.go
+++ b/src/cmd/compile/internal/gc/swt.go
@@ -365,8 +365,8 @@ func (s *exprSwitch) flush() {
 		// all we need here is consistency. We respect this
 		// sorting below.
 		sort.Slice(cc, func(i, j int) bool {
-			si := cc[i].lo.StringVal()
-			sj := cc[j].lo.StringVal()
+			si := ir.StringVal(cc[i].lo)
+			sj := ir.StringVal(cc[j].lo)
 			if len(si) != len(sj) {
 				return len(si) < len(sj)
 			}
@@ -375,7 +375,7 @@ func (s *exprSwitch) flush() {
 
 		// runLen returns the string length associated with a
 		// particular run of exprClauses.
-		runLen := func(run []exprClause) int64 { return int64(len(run[0].lo.StringVal())) }
+		runLen := func(run []exprClause) int64 { return int64(len(ir.StringVal(run[0].lo))) }
 
 		// Collapse runs of consecutive strings with the same length.
 		var runs [][]exprClause
@@ -411,7 +411,7 @@ func (s *exprSwitch) flush() {
 	merged := cc[:1]
 	for _, c := range cc[1:] {
 		last := &merged[len(merged)-1]
-		if last.jmp == c.jmp && last.hi.Int64Val()+1 == c.lo.Int64Val() {
+		if last.jmp == c.jmp && ir.Int64Val(last.hi)+1 == ir.Int64Val(c.lo) {
 			last.hi = c.lo
 		} else {
 			merged = append(merged, c)
@@ -446,7 +446,7 @@ func (c *exprClause) test(exprname ir.Node) ir.Node {
 
 	// Optimize "switch true { ...}" and "switch false { ... }".
 	if ir.IsConst(exprname, constant.Bool) && !c.lo.Type().IsInterface() {
-		if exprname.BoolVal() {
+		if ir.BoolVal(exprname) {
 			return c.lo
 		} else {
 			return ir.NodAt(c.pos, ir.ONOT, c.lo, nil)
diff --git a/src/cmd/compile/internal/gc/typecheck.go b/src/cmd/compile/internal/gc/typecheck.go
index 55443ba596..b19481311b 100644
--- a/src/cmd/compile/internal/gc/typecheck.go
+++ b/src/cmd/compile/internal/gc/typecheck.go
@@ -1054,8 +1054,8 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
 					base.Errorf("invalid %s index %v (index must be non-negative)", why, n.Right())
 				} else if t.IsArray() && constant.Compare(x, token.GEQ, constant.MakeInt64(t.NumElem())) {
 					base.Errorf("invalid array index %v (out of bounds for %d-element array)", n.Right(), t.NumElem())
-				} else if ir.IsConst(n.Left(), constant.String) && constant.Compare(x, token.GEQ, constant.MakeInt64(int64(len(n.Left().StringVal())))) {
-					base.Errorf("invalid string index %v (out of bounds for %d-byte string)", n.Right(), len(n.Left().StringVal()))
+				} else if ir.IsConst(n.Left(), constant.String) && constant.Compare(x, token.GEQ, constant.MakeInt64(int64(len(ir.StringVal(n.Left()))))) {
+					base.Errorf("invalid string index %v (out of bounds for %d-byte string)", n.Right(), len(ir.StringVal(n.Left())))
 				} else if doesoverflow(x, types.Types[types.TINT]) {
 					base.Errorf("invalid %s index %v (index too large)", why, n.Right())
 				}
@@ -1146,11 +1146,11 @@ func typecheck1(n ir.Node, top int) (res ir.Node) {
 		l = defaultlit(l, types.Types[types.TINT])
 		c = defaultlit(c, types.Types[types.TINT])
 
-		if ir.IsConst(l, constant.Int) && l.Int64Val() < 0 {
+		if ir.IsConst(l, constant.Int) && ir.Int64Val(l) < 0 {
 			base.Fatalf("len for OSLICEHEADER must be non-negative")
 		}
 
-		if ir.IsConst(c, constant.Int) && c.Int64Val() < 0 {
+		if ir.IsConst(c, constant.Int) && ir.Int64Val(c) < 0 {
 			base.Fatalf("cap for OSLICEHEADER must be non-negative")
 		}
 
@@ -2173,8 +2173,8 @@ func checksliceindex(l ir.Node, r ir.Node, tp *types.Type) bool {
 	} else if tp != nil && tp.NumElem() >= 0 && constant.Compare(x, token.GTR, constant.MakeInt64(tp.NumElem())) {
 		base.Errorf("invalid slice index %v (out of bounds for %d-element array)", r, tp.NumElem())
 		return false
-	} else if ir.IsConst(l, constant.String) && constant.Compare(x, token.GTR, constant.MakeInt64(int64(len(l.StringVal())))) {
-		base.Errorf("invalid slice index %v (out of bounds for %d-byte string)", r, len(l.StringVal()))
+	} else if ir.IsConst(l, constant.String) && constant.Compare(x, token.GTR, constant.MakeInt64(int64(len(ir.StringVal(l))))) {
+		base.Errorf("invalid slice index %v (out of bounds for %d-byte string)", r, len(ir.StringVal(l)))
 		return false
 	} else if doesoverflow(x, types.Types[types.TINT]) {
 		base.Errorf("invalid slice index %v (index too large)", r)
@@ -3407,7 +3407,7 @@ func stringtoruneslit(n ir.Node) ir.Node {
 
 	var l []ir.Node
 	i := 0
-	for _, r := range n.Left().StringVal() {
+	for _, r := range ir.StringVal(n.Left()) {
 		l = append(l, ir.Nod(ir.OKEY, nodintconst(int64(i)), nodintconst(int64(r))))
 		i++
 	}
@@ -3803,7 +3803,7 @@ func deadcode(fn *ir.Func) {
 			return
 		}
 	case ir.OFOR:
-		if !ir.IsConst(n.Left(), constant.Bool) || n.Left().BoolVal() {
+		if !ir.IsConst(n.Left(), constant.Bool) || ir.BoolVal(n.Left()) {
 			return
 		}
 	default:
@@ -3833,7 +3833,7 @@ func deadcodeslice(nn *ir.Nodes) {
 			n.SetLeft(deadcodeexpr(n.Left()))
 			if ir.IsConst(n.Left(), constant.Bool) {
 				var body ir.Nodes
-				if n.Left().BoolVal() {
+				if ir.BoolVal(n.Left()) {
 					n.SetRlist(ir.Nodes{})
 					body = n.Body()
 				} else {
@@ -3876,7 +3876,7 @@ func deadcodeexpr(n ir.Node) ir.Node {
 		n.SetLeft(deadcodeexpr(n.Left()))
 		n.SetRight(deadcodeexpr(n.Right()))
 		if ir.IsConst(n.Left(), constant.Bool) {
-			if n.Left().BoolVal() {
+			if ir.BoolVal(n.Left()) {
 				return n.Right() // true && x => x
 			} else {
 				return n.Left() // false && x => false
@@ -3886,7 +3886,7 @@ func deadcodeexpr(n ir.Node) ir.Node {
 		n.SetLeft(deadcodeexpr(n.Left()))
 		n.SetRight(deadcodeexpr(n.Right()))
 		if ir.IsConst(n.Left(), constant.Bool) {
-			if n.Left().BoolVal() {
+			if ir.BoolVal(n.Left()) {
 				return n.Left() // true || x => true
 			} else {
 				return n.Right() // false || x => x
diff --git a/src/cmd/compile/internal/gc/walk.go b/src/cmd/compile/internal/gc/walk.go
index e72015c05e..ce7de1396b 100644
--- a/src/cmd/compile/internal/gc/walk.go
+++ b/src/cmd/compile/internal/gc/walk.go
@@ -1014,7 +1014,7 @@ opswitch:
 		// The SSA backend will handle those.
 		switch et {
 		case types.TINT64:
-			c := n.Right().Int64Val()
+			c := ir.Int64Val(n.Right())
 			if c < 0 {
 				c = -c
 			}
@@ -1022,7 +1022,7 @@ opswitch:
 				break opswitch
 			}
 		case types.TUINT64:
-			c := n.Right().Uint64Val()
+			c := ir.Uint64Val(n.Right())
 			if c < 1<<16 {
 				break opswitch
 			}
@@ -1072,7 +1072,7 @@ opswitch:
 				base.Errorf("index out of bounds")
 			}
 		} else if ir.IsConst(n.Left(), constant.String) {
-			n.SetBounded(bounded(r, int64(len(n.Left().StringVal()))))
+			n.SetBounded(bounded(r, int64(len(ir.StringVal(n.Left())))))
 			if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Right(), constant.Int) {
 				base.Warn("index bounds check elided")
 			}
@@ -1507,7 +1507,7 @@ opswitch:
 	case ir.OSTR2BYTES:
 		s := n.Left()
 		if ir.IsConst(s, constant.String) {
-			sc := s.StringVal()
+			sc := ir.StringVal(s)
 
 			// Allocate a [n]byte of the right size.
 			t := types.NewArray(types.Types[types.TUINT8], int64(len(sc)))
@@ -1936,7 +1936,7 @@ func walkprint(nn ir.Node, init *ir.Nodes) ir.Node {
 	for i := 0; i < len(s); {
 		var strs []string
 		for i < len(s) && ir.IsConst(s[i], constant.String) {
-			strs = append(strs, s[i].StringVal())
+			strs = append(strs, ir.StringVal(s[i]))
 			i++
 		}
 		if len(strs) > 0 {
@@ -2016,7 +2016,7 @@ func walkprint(nn ir.Node, init *ir.Nodes) ir.Node {
 		case types.TSTRING:
 			cs := ""
 			if ir.IsConst(n, constant.String) {
-				cs = n.StringVal()
+				cs = ir.StringVal(n)
 			}
 			switch cs {
 			case " ":
@@ -2673,7 +2673,7 @@ func addstr(n ir.Node, init *ir.Nodes) ir.Node {
 	sz := int64(0)
 	for _, n1 := range n.List().Slice() {
 		if n1.Op() == ir.OLITERAL {
-			sz += int64(len(n1.StringVal()))
+			sz += int64(len(ir.StringVal(n1)))
 		}
 	}
 
@@ -3467,7 +3467,7 @@ func walkcompare(n ir.Node, init *ir.Nodes) ir.Node {
 
 func tracecmpArg(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node {
 	// Ugly hack to avoid "constant -1 overflows uintptr" errors, etc.
-	if n.Op() == ir.OLITERAL && n.Type().IsSigned() && n.Int64Val() < 0 {
+	if n.Op() == ir.OLITERAL && n.Type().IsSigned() && ir.Int64Val(n) < 0 {
 		n = copyexpr(n, n.Type(), init)
 	}
 
@@ -3537,7 +3537,7 @@ func walkcompareString(n ir.Node, init *ir.Nodes) ir.Node {
 			// Length-only checks are ok, though.
 			maxRewriteLen = 0
 		}
-		if s := cs.StringVal(); len(s) <= maxRewriteLen {
+		if s := ir.StringVal(cs); len(s) <= maxRewriteLen {
 			if len(s) > 0 {
 				ncs = safeexpr(ncs, init)
 			}
@@ -3632,7 +3632,7 @@ func bounded(n ir.Node, max int64) bool {
 	bits := int32(8 * n.Type().Width)
 
 	if smallintconst(n) {
-		v := n.Int64Val()
+		v := ir.Int64Val(n)
 		return 0 <= v && v < max
 	}
 
@@ -3641,9 +3641,9 @@ func bounded(n ir.Node, max int64) bool {
 		v := int64(-1)
 		switch {
 		case smallintconst(n.Left()):
-			v = n.Left().Int64Val()
+			v = ir.Int64Val(n.Left())
 		case smallintconst(n.Right()):
-			v = n.Right().Int64Val()
+			v = ir.Int64Val(n.Right())
 			if n.Op() == ir.OANDNOT {
 				v = ^v
 				if !sign {
@@ -3657,7 +3657,7 @@ func bounded(n ir.Node, max int64) bool {
 
 	case ir.OMOD:
 		if !sign && smallintconst(n.Right()) {
-			v := n.Right().Int64Val()
+			v := ir.Int64Val(n.Right())
 			if 0 <= v && v <= max {
 				return true
 			}
@@ -3665,7 +3665,7 @@ func bounded(n ir.Node, max int64) bool {
 
 	case ir.ODIV:
 		if !sign && smallintconst(n.Right()) {
-			v := n.Right().Int64Val()
+			v := ir.Int64Val(n.Right())
 			for bits > 0 && v >= 2 {
 				bits--
 				v >>= 1
@@ -3674,7 +3674,7 @@ func bounded(n ir.Node, max int64) bool {
 
 	case ir.ORSH:
 		if !sign && smallintconst(n.Right()) {
-			v := n.Right().Int64Val()
+			v := ir.Int64Val(n.Right())
 			if v > int64(bits) {
 				return true
 			}
diff --git a/src/cmd/compile/internal/ir/name.go b/src/cmd/compile/internal/ir/name.go
index 1d886bb9a1..aeeb63d2d6 100644
--- a/src/cmd/compile/internal/ir/name.go
+++ b/src/cmd/compile/internal/ir/name.go
@@ -296,62 +296,6 @@ func (n *Name) SetVal(v constant.Value) {
 	n.val = v
 }
 
-// Int64Val returns n as an int64.
-// n must be an integer or rune constant.
-func (n *Name) Int64Val() int64 {
-	if !IsConst(n, constant.Int) {
-		base.Fatalf("Int64Val(%v)", n)
-	}
-	x, ok := constant.Int64Val(n.Val())
-	if !ok {
-		base.Fatalf("Int64Val(%v)", n)
-	}
-	return x
-}
-
-// CanInt64 reports whether it is safe to call Int64Val() on n.
-func (n *Name) CanInt64() bool {
-	if !IsConst(n, constant.Int) {
-		return false
-	}
-
-	// if the value inside n cannot be represented as an int64, the
-	// return value of Int64 is undefined
-	_, ok := constant.Int64Val(n.Val())
-	return ok
-}
-
-// Uint64Val returns n as an uint64.
-// n must be an integer or rune constant.
-func (n *Name) Uint64Val() uint64 {
-	if !IsConst(n, constant.Int) {
-		base.Fatalf("Uint64Val(%v)", n)
-	}
-	x, ok := constant.Uint64Val(n.Val())
-	if !ok {
-		base.Fatalf("Uint64Val(%v)", n)
-	}
-	return x
-}
-
-// BoolVal returns n as a bool.
-// n must be a boolean constant.
-func (n *Name) BoolVal() bool {
-	if !IsConst(n, constant.Bool) {
-		base.Fatalf("BoolVal(%v)", n)
-	}
-	return constant.BoolVal(n.Val())
-}
-
-// StringVal returns the value of a literal string Node as a string.
-// n must be a string constant.
-func (n *Name) StringVal() string {
-	if !IsConst(n, constant.String) {
-		base.Fatalf("StringVal(%v)", n)
-	}
-	return constant.StringVal(n.Val())
-}
-
 // The Class of a variable/function describes the "storage class"
 // of a variable or function. During parsing, storage classes are
 // called declaration contexts.
diff --git a/src/cmd/compile/internal/ir/node.go b/src/cmd/compile/internal/ir/node.go
index cc3ac5765d..42ba4cb0e9 100644
--- a/src/cmd/compile/internal/ir/node.go
+++ b/src/cmd/compile/internal/ir/node.go
@@ -87,11 +87,6 @@ type Node interface {
 	MarkReadonly()
 	Val() constant.Value
 	SetVal(v constant.Value)
-	Int64Val() int64
-	Uint64Val() uint64
-	CanInt64() bool
-	BoolVal() bool
-	StringVal() string
 
 	// Storage for analysis passes.
 	Esc() uint16
diff --git a/src/cmd/compile/internal/ir/val.go b/src/cmd/compile/internal/ir/val.go
index aae965bb4c..ad0df5508d 100644
--- a/src/cmd/compile/internal/ir/val.go
+++ b/src/cmd/compile/internal/ir/val.go
@@ -32,7 +32,7 @@ func ConstValue(n Node) interface{} {
 	case constant.String:
 		return constant.StringVal(v)
 	case constant.Int:
-		return Int64Val(n.Type(), v)
+		return IntVal(n.Type(), v)
 	case constant.Float:
 		return Float64Val(v)
 	case constant.Complex:
@@ -42,7 +42,7 @@
 
 // int64Val returns v converted to int64.
 // Note: if t is uint64, very large values will be converted to negative int64.
-func Int64Val(t *types.Type, v constant.Value) int64 {
+func IntVal(t *types.Type, v constant.Value) int64 {
 	if t.IsUnsigned() {
 		if x, ok := constant.Uint64Val(v); ok {
 			return int64(x)
@@ -118,3 +118,59 @@ func idealType(ct constant.Kind) *types.Type {
 }
 
 var OKForConst [types.NTYPE]bool
+
+// CanInt64 reports whether it is safe to call Int64Val() on n.
+func CanInt64(n Node) bool {
+	if !IsConst(n, constant.Int) {
+		return false
+	}
+
+	// if the value inside n cannot be represented as an int64, the
+	// return value of Int64 is undefined
+	_, ok := constant.Int64Val(n.Val())
+	return ok
+}
+
+// Int64Val returns n as an int64.
+// n must be an integer or rune constant.
+func Int64Val(n Node) int64 {
+	if !IsConst(n, constant.Int) {
+		base.Fatalf("Int64Val(%v)", n)
+	}
+	x, ok := constant.Int64Val(n.Val())
+	if !ok {
+		base.Fatalf("Int64Val(%v)", n)
+	}
+	return x
+}
+
+// Uint64Val returns n as an uint64.
+// n must be an integer or rune constant.
+func Uint64Val(n Node) uint64 {
+	if !IsConst(n, constant.Int) {
+		base.Fatalf("Uint64Val(%v)", n)
+	}
+	x, ok := constant.Uint64Val(n.Val())
+	if !ok {
+		base.Fatalf("Uint64Val(%v)", n)
+	}
+	return x
+}
+
+// BoolVal returns n as a bool.
+// n must be a boolean constant.
+func BoolVal(n Node) bool {
+	if !IsConst(n, constant.Bool) {
+		base.Fatalf("BoolVal(%v)", n)
+	}
+	return constant.BoolVal(n.Val())
+}
+
+// StringVal returns the value of a literal string Node as a string.
+// n must be a string constant.
+func StringVal(n Node) string {
+	if !IsConst(n, constant.String) {
+		base.Fatalf("StringVal(%v)", n)
+	}
+	return constant.StringVal(n.Val())
+}
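
Note (not part of the patch): the helpers moved into package ir above are thin wrappers around the standard go/constant package, which, unlike cmd/compile/internal/ir, is importable outside the compiler. The sketch below shows the same checked-extraction pattern applied directly to constant.Value; the mustInt64/mustString names are illustrative only and do not appear in this CL.

package main

import (
	"fmt"
	"go/constant"
	"log"
)

// mustInt64 mirrors the pattern of the new ir.Int64Val: reject anything
// that is not an integer constant, then fail loudly if the value does
// not fit in an int64.
func mustInt64(v constant.Value) int64 {
	if v.Kind() != constant.Int {
		log.Fatalf("mustInt64(%v): not an integer constant", v)
	}
	x, ok := constant.Int64Val(v)
	if !ok {
		log.Fatalf("mustInt64(%v): does not fit in int64", v)
	}
	return x
}

// mustString mirrors the pattern of the new ir.StringVal for string constants.
func mustString(v constant.Value) string {
	if v.Kind() != constant.String {
		log.Fatalf("mustString(%v): not a string constant", v)
	}
	return constant.StringVal(v)
}

func main() {
	i := constant.MakeInt64(42)
	s := constant.MakeString("hello")
	fmt.Println(mustInt64(i))  // 42
	fmt.Println(mustString(s)) // hello
}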