mirror of https://github.com/golang/go.git
cmd/compile: normalize spaces in rewrite rule comments.
In addition to looking nicer to the eye, this allows rules to be reformatted and indented without causing spurious changes to the generated file, making it easier to spot functional changes. After this CL, all CLs that will aggregate rules through the new "|" functionality should cause no changes to the generated files.

Change-Id: Icec283585ba8d7b91c79d76513c1d83dca4b30aa
Reviewed-on: https://go-review.googlesource.com/95216
Run-TryBot: Giovanni Bajo <rasky@develer.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Brad Fitzpatrick <bradfitz@golang.org>
This commit is contained in:
parent 4dc25ceda4
commit 70fd25e4e1
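The normalization described above can be illustrated with a small standalone sketch (editorial illustration, not part of the commit): it reproduces the whitespace collapsing performed by the normalizeSpaces helper added in the hunk below, so a rule that is re-indented or padded for alignment still produces byte-identical comments in the generated files. The sample rule text is invented for the example.

// Minimal sketch, assuming only the standard library; the sample rule text
// below is illustrative only.
package main

import (
	"fmt"
	"strings"
)

// normalizeSpaces mirrors the helper added in the hunk below: trim the string
// and collapse every run of whitespace (spaces, tabs, newlines) to one space.
func normalizeSpaces(s string) string {
	return strings.Join(strings.Fields(strings.TrimSpace(s)), " ")
}

func main() {
	// The same rule written with alignment padding and a line break...
	raw := "(ADDshiftRL [c] (SLLconst x [32-c]) x)\n\t->  (SRRconst [ c] x)"
	// ...collapses to one canonical spelling, so reformatting the .rules
	// file cannot perturb the comments emitted into the generated files.
	fmt.Println(normalizeSpaces(raw))
	// Prints: (ADDshiftRL [c] (SLLconst x [32-c]) x) -> (SRRconst [ c] x)
}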
@@ -60,18 +60,22 @@ func (r Rule) String() string {
 	return fmt.Sprintf("rule %q at %s", r.rule, r.loc)
 }
 
+func normalizeSpaces(s string) string {
+	return strings.Join(strings.Fields(strings.TrimSpace(s)), " ")
+}
+
 // parse returns the matching part of the rule, additional conditions, and the result.
 func (r Rule) parse() (match, cond, result string) {
 	s := strings.Split(r.rule, "->")
 	if len(s) != 2 {
 		log.Fatalf("no arrow in %s", r)
 	}
-	match = strings.TrimSpace(s[0])
-	result = strings.TrimSpace(s[1])
+	match = normalizeSpaces(s[0])
+	result = normalizeSpaces(s[1])
 	cond = ""
 	if i := strings.Index(match, "&&"); i >= 0 {
-		cond = strings.TrimSpace(match[i+2:])
-		match = strings.TrimSpace(match[:i])
+		cond = normalizeSpaces(match[i+2:])
+		match = normalizeSpaces(match[:i])
 	}
 	return match, cond, result
 }
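For context, here is a self-contained sketch of how the updated parse path behaves (a reimplementation for illustration, not the compiler's actual package; the sample rule is invented): the rule is split on "->", an optional "&&" condition is peeled off the match side, and every part goes through normalizeSpaces.

// Illustration only: mirrors Rule.parse after this change, outside the compiler.
package main

import (
	"fmt"
	"log"
	"strings"
)

func normalizeSpaces(s string) string {
	return strings.Join(strings.Fields(strings.TrimSpace(s)), " ")
}

// parseRule splits a rewrite rule into its match, condition and result parts,
// normalizing spaces in each, as Rule.parse does in the hunk above.
func parseRule(rule string) (match, cond, result string) {
	s := strings.Split(rule, "->")
	if len(s) != 2 {
		log.Fatalf("no arrow in %s", rule)
	}
	match = normalizeSpaces(s[0])
	result = normalizeSpaces(s[1])
	if i := strings.Index(match, "&&"); i >= 0 {
		cond = normalizeSpaces(match[i+2:])
		match = normalizeSpaces(match[:i])
	}
	return match, cond, result
}

func main() {
	m, c, r := parseRule("(Move [s] {t} dst src mem)  &&  s > 24  ->  (LoweredMove dst src mem)")
	fmt.Printf("match=%q\ncond=%q\nresult=%q\n", m, c, r)
	// match="(Move [s] {t} dst src mem)"
	// cond="s > 24"
	// result="(LoweredMove dst src mem)"
}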
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -3144,7 +3144,7 @@ func rewriteValueARM_OpARMADDshiftRL_0(v *Value) bool {
}
// match: (ADDshiftRL [c] (SLLconst x [32-c]) x)
// cond:
// result: (SRRconst [ c] x)
for {
c := v.AuxInt
_ = v.Args[1]
@@ -12127,7 +12127,7 @@ func rewriteValueARM_OpARMORshiftRL_0(v *Value) bool {
}
// match: (ORshiftRL [c] (SLLconst x [32-c]) x)
// cond:
// result: (SRRconst [ c] x)
for {
c := v.AuxInt
_ = v.Args[1]
@@ -17389,7 +17389,7 @@ func rewriteValueARM_OpARMXORshiftRL_0(v *Value) bool {
}
// match: (XORshiftRL [c] (SLLconst x [32-c]) x)
// cond:
// result: (SRRconst [ c] x)
for {
c := v.AuxInt
_ = v.Args[1]
@@ -17746,7 +17746,7 @@ func rewriteValueARM_OpBswap32_0(v *Value) bool {
_ = b
// match: (Bswap32 <t> x)
// cond: objabi.GOARM==5
// result: (XOR <t> (SRLconst <t> (BICconst <t> (XOR <t> x (SRRconst <t> [16] x)) [0xff0000]) [8]) (SRRconst <t> x [8]))
for {
t := v.Type
x := v.Args[0]
@@ -18136,7 +18136,7 @@ func rewriteValueARM_OpDiv32_0(v *Value) bool {
_ = typ
// match: (Div32 x y)
// cond:
// result: (SUB (XOR <typ.UInt32> (Select0 <typ.UInt32> (CALLudiv (SUB <typ.UInt32> (XOR x <typ.UInt32> (Signmask x)) (Signmask x)) (SUB <typ.UInt32> (XOR y <typ.UInt32> (Signmask y)) (Signmask y)))) (Signmask (XOR <typ.UInt32> x y))) (Signmask (XOR <typ.UInt32> x y)))
for {
_ = v.Args[1]
x := v.Args[0]
@@ -19720,7 +19720,7 @@ func rewriteValueARM_OpMod32_0(v *Value) bool {
_ = typ
// match: (Mod32 x y)
// cond:
// result: (SUB (XOR <typ.UInt32> (Select1 <typ.UInt32> (CALLudiv (SUB <typ.UInt32> (XOR <typ.UInt32> x (Signmask x)) (Signmask x)) (SUB <typ.UInt32> (XOR <typ.UInt32> y (Signmask y)) (Signmask y)))) (Signmask x)) (Signmask x))
for {
_ = v.Args[1]
x := v.Args[0]
@@ -19895,7 +19895,7 @@ func rewriteValueARM_OpMove_0(v *Value) bool {
}
// match: (Move [2] dst src mem)
// cond:
// result: (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem))
for {
if v.AuxInt != 2 {
break
@@ -19948,7 +19948,7 @@ func rewriteValueARM_OpMove_0(v *Value) bool {
}
// match: (Move [4] {t} dst src mem)
// cond: t.(*types.Type).Alignment()%2 == 0
// result: (MOVHstore [2] dst (MOVHUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem))
for {
if v.AuxInt != 4 {
break
@@ -19981,7 +19981,7 @@ func rewriteValueARM_OpMove_0(v *Value) bool {
}
// match: (Move [4] dst src mem)
// cond:
// result: (MOVBstore [3] dst (MOVBUload [3] src mem) (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem))))
for {
if v.AuxInt != 4 {
break
@@ -20028,7 +20028,7 @@ func rewriteValueARM_OpMove_0(v *Value) bool {
}
// match: (Move [3] dst src mem)
// cond:
// result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem)))
for {
if v.AuxInt != 3 {
break
@@ -20065,7 +20065,7 @@ func rewriteValueARM_OpMove_0(v *Value) bool {
return true
}
// match: (Move [s] {t} dst src mem)
// cond: s%4 == 0 && s > 4 && s <= 512 && t.(*types.Type).Alignment()%4 == 0 && !config.noDuffDevice
// result: (DUFFCOPY [8 * (128 - int64(s/4))] dst src mem)
for {
s := v.AuxInt
@@ -20086,7 +20086,7 @@ func rewriteValueARM_OpMove_0(v *Value) bool {
}
// match: (Move [s] {t} dst src mem)
// cond: (s > 512 || config.noDuffDevice) || t.(*types.Type).Alignment()%4 != 0
// result: (LoweredMove [t.(*types.Type).Alignment()] dst src (ADDconst <src.Type> src [s-moveSize(t.(*types.Type).Alignment(), config)]) mem)
for {
s := v.AuxInt
t := v.Aux
@@ -21834,7 +21834,7 @@ func rewriteValueARM_OpZero_0(v *Value) bool {
}
// match: (Zero [2] ptr mem)
// cond:
// result: (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))
for {
if v.AuxInt != 2 {
break
@@ -21882,7 +21882,7 @@ func rewriteValueARM_OpZero_0(v *Value) bool {
}
// match: (Zero [4] {t} ptr mem)
// cond: t.(*types.Type).Alignment()%2 == 0
// result: (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem))
for {
if v.AuxInt != 4 {
break
@@ -21912,7 +21912,7 @@ func rewriteValueARM_OpZero_0(v *Value) bool {
}
// match: (Zero [4] ptr mem)
// cond:
// result: (MOVBstore [3] ptr (MOVWconst [0]) (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))))
for {
if v.AuxInt != 4 {
break
@@ -21952,7 +21952,7 @@ func rewriteValueARM_OpZero_0(v *Value) bool {
}
// match: (Zero [3] ptr mem)
// cond:
// result: (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem)))
for {
if v.AuxInt != 3 {
break
@@ -21984,7 +21984,7 @@ func rewriteValueARM_OpZero_0(v *Value) bool {
return true
}
// match: (Zero [s] {t} ptr mem)
// cond: s%4 == 0 && s > 4 && s <= 512 && t.(*types.Type).Alignment()%4 == 0 && !config.noDuffDevice
// result: (DUFFZERO [4 * (128 - int64(s/4))] ptr (MOVWconst [0]) mem)
for {
s := v.AuxInt
@@ -22006,7 +22006,7 @@ func rewriteValueARM_OpZero_0(v *Value) bool {
}
// match: (Zero [s] {t} ptr mem)
// cond: (s > 512 || config.noDuffDevice) || t.(*types.Type).Alignment()%4 != 0
// result: (LoweredZero [t.(*types.Type).Alignment()] ptr (ADDconst <ptr.Type> ptr [s-moveSize(t.(*types.Type).Alignment(), config)]) (MOVWconst [0]) mem)
for {
s := v.AuxInt
t := v.Aux
File diff suppressed because it is too large
@@ -711,7 +711,7 @@ func rewriteValueMIPS_OpAtomicAnd8_0(v *Value) bool {
_ = typ
// match: (AtomicAnd8 ptr val mem)
// cond: !config.BigEndian
// result: (LoweredAtomicAnd (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (OR <typ.UInt32> (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))) (NORconst [0] <typ.UInt32> (SLL <typ.UInt32> (MOVWconst [0xff]) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))))) mem)
for {
_ = v.Args[2]
ptr := v.Args[0]
@@ -761,7 +761,7 @@ func rewriteValueMIPS_OpAtomicAnd8_0(v *Value) bool {
}
// match: (AtomicAnd8 ptr val mem)
// cond: config.BigEndian
// result: (LoweredAtomicAnd (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (OR <typ.UInt32> (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] (XORconst <typ.UInt32> [3] ptr)))) (NORconst [0] <typ.UInt32> (SLL <typ.UInt32> (MOVWconst [0xff]) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] (XORconst <typ.UInt32> [3] ptr)))))) mem)
for {
_ = v.Args[2]
ptr := v.Args[0]
@@ -868,7 +868,7 @@ func rewriteValueMIPS_OpAtomicLoad32_0(v *Value) bool {
func rewriteValueMIPS_OpAtomicLoadPtr_0(v *Value) bool {
// match: (AtomicLoadPtr ptr mem)
// cond:
// result: (LoweredAtomicLoad ptr mem)
for {
_ = v.Args[1]
ptr := v.Args[0]
@@ -888,7 +888,7 @@ func rewriteValueMIPS_OpAtomicOr8_0(v *Value) bool {
_ = typ
// match: (AtomicOr8 ptr val mem)
// cond: !config.BigEndian
// result: (LoweredAtomicOr (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))) mem)
for {
_ = v.Args[2]
ptr := v.Args[0]
@@ -921,7 +921,7 @@ func rewriteValueMIPS_OpAtomicOr8_0(v *Value) bool {
}
// match: (AtomicOr8 ptr val mem)
// cond: config.BigEndian
// result: (LoweredAtomicOr (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] (XORconst <typ.UInt32> [3] ptr)))) mem)
for {
_ = v.Args[2]
ptr := v.Args[0]
@@ -976,7 +976,7 @@ func rewriteValueMIPS_OpAtomicStore32_0(v *Value) bool {
func rewriteValueMIPS_OpAtomicStorePtrNoWB_0(v *Value) bool {
// match: (AtomicStorePtrNoWB ptr val mem)
// cond:
// result: (LoweredAtomicStore ptr val mem)
for {
_ = v.Args[2]
ptr := v.Args[0]
@@ -3518,7 +3518,7 @@ func rewriteValueMIPS_OpMIPSMOVBUreg_0(v *Value) bool {
func rewriteValueMIPS_OpMIPSMOVBload_0(v *Value) bool {
// match: (MOVBload [off1] {sym} x:(ADDconst [off2] ptr) mem)
// cond: (is16Bit(off1+off2) || x.Uses == 1)
// result: (MOVBload [off1+off2] {sym} ptr mem)
for {
off1 := v.AuxInt
sym := v.Aux
@@ -3923,7 +3923,7 @@ func rewriteValueMIPS_OpMIPSMOVBstorezero_0(v *Value) bool {
func rewriteValueMIPS_OpMIPSMOVDload_0(v *Value) bool {
// match: (MOVDload [off1] {sym} x:(ADDconst [off2] ptr) mem)
// cond: (is16Bit(off1+off2) || x.Uses == 1)
// result: (MOVDload [off1+off2] {sym} ptr mem)
for {
off1 := v.AuxInt
sym := v.Aux
@@ -4056,7 +4056,7 @@ func rewriteValueMIPS_OpMIPSMOVDstore_0(v *Value) bool {
func rewriteValueMIPS_OpMIPSMOVFload_0(v *Value) bool {
// match: (MOVFload [off1] {sym} x:(ADDconst [off2] ptr) mem)
// cond: (is16Bit(off1+off2) || x.Uses == 1)
// result: (MOVFload [off1+off2] {sym} ptr mem)
for {
off1 := v.AuxInt
sym := v.Aux
@@ -4375,7 +4375,7 @@ func rewriteValueMIPS_OpMIPSMOVHUreg_0(v *Value) bool {
func rewriteValueMIPS_OpMIPSMOVHload_0(v *Value) bool {
// match: (MOVHload [off1] {sym} x:(ADDconst [off2] ptr) mem)
// cond: (is16Bit(off1+off2) || x.Uses == 1)
// result: (MOVHload [off1+off2] {sym} ptr mem)
for {
off1 := v.AuxInt
sym := v.Aux
@@ -4786,7 +4786,7 @@ func rewriteValueMIPS_OpMIPSMOVHstorezero_0(v *Value) bool {
func rewriteValueMIPS_OpMIPSMOVWload_0(v *Value) bool {
// match: (MOVWload [off1] {sym} x:(ADDconst [off2] ptr) mem)
// cond: (is16Bit(off1+off2) || x.Uses == 1)
// result: (MOVWload [off1+off2] {sym} ptr mem)
for {
off1 := v.AuxInt
sym := v.Aux
@@ -5295,7 +5295,7 @@ func rewriteValueMIPS_OpMIPSOR_0(v *Value) bool {
_ = b
// match: (OR x (MOVWconst [c]))
// cond:
// result: (ORconst [c] x)
for {
_ = v.Args[1]
x := v.Args[0]
@@ -5311,7 +5311,7 @@ func rewriteValueMIPS_OpMIPSOR_0(v *Value) bool {
}
// match: (OR (MOVWconst [c]) x)
// cond:
// result: (ORconst [c] x)
for {
_ = v.Args[1]
v_0 := v.Args[0]
@@ -5445,7 +5445,7 @@ func rewriteValueMIPS_OpMIPSORconst_0(v *Value) bool {
func rewriteValueMIPS_OpMIPSSGT_0(v *Value) bool {
// match: (SGT (MOVWconst [c]) x)
// cond:
// result: (SGTconst [c] x)
for {
_ = v.Args[1]
v_0 := v.Args[0]
@@ -6469,7 +6469,7 @@ func rewriteValueMIPS_OpMove_0(v *Value) bool {
}
// match: (Move [2] dst src mem)
// cond:
// result: (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem))
for {
if v.AuxInt != 2 {
break
@@ -6522,7 +6522,7 @@ func rewriteValueMIPS_OpMove_0(v *Value) bool {
}
// match: (Move [4] {t} dst src mem)
// cond: t.(*types.Type).Alignment()%2 == 0
// result: (MOVHstore [2] dst (MOVHUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem))
for {
if v.AuxInt != 4 {
break
@@ -6555,7 +6555,7 @@ func rewriteValueMIPS_OpMove_0(v *Value) bool {
}
// match: (Move [4] dst src mem)
// cond:
// result: (MOVBstore [3] dst (MOVBUload [3] src mem) (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem))))
for {
if v.AuxInt != 4 {
break
@@ -6602,7 +6602,7 @@ func rewriteValueMIPS_OpMove_0(v *Value) bool {
}
// match: (Move [3] dst src mem)
// cond:
// result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem)))
for {
if v.AuxInt != 3 {
break
@@ -6640,7 +6640,7 @@ func rewriteValueMIPS_OpMove_0(v *Value) bool {
}
// match: (Move [8] {t} dst src mem)
// cond: t.(*types.Type).Alignment()%4 == 0
// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
for {
if v.AuxInt != 8 {
break
@@ -6673,7 +6673,7 @@ func rewriteValueMIPS_OpMove_0(v *Value) bool {
}
// match: (Move [8] {t} dst src mem)
// cond: t.(*types.Type).Alignment()%2 == 0
// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
for {
if v.AuxInt != 8 {
break
@@ -6733,7 +6733,7 @@ func rewriteValueMIPS_OpMove_10(v *Value) bool {
_ = typ
// match: (Move [6] {t} dst src mem)
// cond: t.(*types.Type).Alignment()%2 == 0
// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
for {
if v.AuxInt != 6 {
break
@@ -6775,7 +6775,7 @@ func rewriteValueMIPS_OpMove_10(v *Value) bool {
}
// match: (Move [12] {t} dst src mem)
// cond: t.(*types.Type).Alignment()%4 == 0
// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
for {
if v.AuxInt != 12 {
break
@@ -6817,7 +6817,7 @@ func rewriteValueMIPS_OpMove_10(v *Value) bool {
}
// match: (Move [16] {t} dst src mem)
// cond: t.(*types.Type).Alignment()%4 == 0
// result: (MOVWstore [12] dst (MOVWload [12] src mem) (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))))
for {
if v.AuxInt != 16 {
break
@@ -6868,7 +6868,7 @@ func rewriteValueMIPS_OpMove_10(v *Value) bool {
}
// match: (Move [s] {t} dst src mem)
// cond: (s > 16 || t.(*types.Type).Alignment()%4 != 0)
// result: (LoweredMove [t.(*types.Type).Alignment()] dst src (ADDconst <src.Type> src [s-moveSize(t.(*types.Type).Alignment(), config)]) mem)
for {
s := v.AuxInt
t := v.Aux
@@ -9236,7 +9236,7 @@ func rewriteValueMIPS_OpZero_0(v *Value) bool {
}
// match: (Zero [2] ptr mem)
// cond:
// result: (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))
for {
if v.AuxInt != 2 {
break
@@ -9284,7 +9284,7 @@ func rewriteValueMIPS_OpZero_0(v *Value) bool {
}
// match: (Zero [4] {t} ptr mem)
// cond: t.(*types.Type).Alignment()%2 == 0
// result: (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem))
for {
if v.AuxInt != 4 {
break
@@ -9314,7 +9314,7 @@ func rewriteValueMIPS_OpZero_0(v *Value) bool {
}
// match: (Zero [4] ptr mem)
// cond:
// result: (MOVBstore [3] ptr (MOVWconst [0]) (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))))
for {
if v.AuxInt != 4 {
break
@@ -9354,7 +9354,7 @@ func rewriteValueMIPS_OpZero_0(v *Value) bool {
}
// match: (Zero [3] ptr mem)
// cond:
// result: (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem)))
for {
if v.AuxInt != 3 {
break
@@ -9387,7 +9387,7 @@ func rewriteValueMIPS_OpZero_0(v *Value) bool {
}
// match: (Zero [6] {t} ptr mem)
// cond: t.(*types.Type).Alignment()%2 == 0
// result: (MOVHstore [4] ptr (MOVWconst [0]) (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem)))
for {
if v.AuxInt != 6 {
break
@@ -9424,7 +9424,7 @@ func rewriteValueMIPS_OpZero_0(v *Value) bool {
}
// match: (Zero [8] {t} ptr mem)
// cond: t.(*types.Type).Alignment()%4 == 0
// result: (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem))
for {
if v.AuxInt != 8 {
break
@@ -9463,7 +9463,7 @@ func rewriteValueMIPS_OpZero_10(v *Value) bool {
_ = typ
// match: (Zero [12] {t} ptr mem)
// cond: t.(*types.Type).Alignment()%4 == 0
// result: (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem)))
for {
if v.AuxInt != 12 {
break
@@ -9500,7 +9500,7 @@ func rewriteValueMIPS_OpZero_10(v *Value) bool {
}
// match: (Zero [16] {t} ptr mem)
// cond: t.(*types.Type).Alignment()%4 == 0
// result: (MOVWstore [12] ptr (MOVWconst [0]) (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem))))
for {
if v.AuxInt != 16 {
break
@@ -9543,8 +9543,8 @@ func rewriteValueMIPS_OpZero_10(v *Value) bool {
return true
}
// match: (Zero [s] {t} ptr mem)
// cond: (s > 16 || t.(*types.Type).Alignment()%4 != 0)
// result: (LoweredZero [t.(*types.Type).Alignment()] ptr (ADDconst <ptr.Type> ptr [s-moveSize(t.(*types.Type).Alignment(), config)]) mem)
for {
s := v.AuxInt
t := v.Aux
@@ -9893,7 +9893,7 @@ func rewriteBlockMIPS(b *Block) bool {
return true
}
// match: (GEZ (MOVWconst [c]) yes no)
// cond: int32(c) < 0
// result: (First nil no yes)
for {
v := b.Control
@@ -9912,7 +9912,7 @@ func rewriteBlockMIPS(b *Block) bool {
}
case BlockMIPSGTZ:
// match: (GTZ (MOVWconst [c]) yes no)
// cond: int32(c) > 0
// result: (First nil yes no)
for {
v := b.Control
@@ -9978,7 +9978,7 @@ func rewriteBlockMIPS(b *Block) bool {
return true
}
// match: (LEZ (MOVWconst [c]) yes no)
// cond: int32(c) > 0
// result: (First nil no yes)
for {
v := b.Control
@@ -9997,7 +9997,7 @@ func rewriteBlockMIPS(b *Block) bool {
}
case BlockMIPSLTZ:
// match: (LTZ (MOVWconst [c]) yes no)
// cond: int32(c) < 0
// result: (First nil yes no)
for {
v := b.Control
@@ -3029,7 +3029,7 @@ func rewriteValueMIPS64_OpLsh16x8_0(v *Value) bool {
_ = typ
// match: (Lsh16x8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -3157,7 +3157,7 @@ func rewriteValueMIPS64_OpLsh32x8_0(v *Value) bool {
_ = typ
// match: (Lsh32x8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -3285,7 +3285,7 @@ func rewriteValueMIPS64_OpLsh64x8_0(v *Value) bool {
_ = typ
// match: (Lsh64x8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -3413,7 +3413,7 @@ func rewriteValueMIPS64_OpLsh8x8_0(v *Value) bool {
_ = typ
// match: (Lsh8x8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -3898,7 +3898,7 @@ func rewriteValueMIPS64_OpMIPS64MOVBUreg_0(v *Value) bool {
func rewriteValueMIPS64_OpMIPS64MOVBload_0(v *Value) bool {
// match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem)
// cond: is32Bit(off1+off2)
// result: (MOVBload [off1+off2] {sym} ptr mem)
for {
off1 := v.AuxInt
sym := v.Aux
@@ -4254,7 +4254,7 @@ func rewriteValueMIPS64_OpMIPS64MOVBstorezero_0(v *Value) bool {
func rewriteValueMIPS64_OpMIPS64MOVDload_0(v *Value) bool {
// match: (MOVDload [off1] {sym} (ADDVconst [off2] ptr) mem)
// cond: is32Bit(off1+off2)
// result: (MOVDload [off1+off2] {sym} ptr mem)
for {
off1 := v.AuxInt
sym := v.Aux
@@ -4362,7 +4362,7 @@ func rewriteValueMIPS64_OpMIPS64MOVDstore_0(v *Value) bool {
func rewriteValueMIPS64_OpMIPS64MOVFload_0(v *Value) bool {
// match: (MOVFload [off1] {sym} (ADDVconst [off2] ptr) mem)
// cond: is32Bit(off1+off2)
// result: (MOVFload [off1+off2] {sym} ptr mem)
for {
off1 := v.AuxInt
sym := v.Aux
@@ -4588,7 +4588,7 @@ func rewriteValueMIPS64_OpMIPS64MOVHUreg_0(v *Value) bool {
func rewriteValueMIPS64_OpMIPS64MOVHload_0(v *Value) bool {
// match: (MOVHload [off1] {sym} (ADDVconst [off2] ptr) mem)
// cond: is32Bit(off1+off2)
// result: (MOVHload [off1+off2] {sym} ptr mem)
for {
off1 := v.AuxInt
sym := v.Aux
@@ -4950,7 +4950,7 @@ func rewriteValueMIPS64_OpMIPS64MOVHstorezero_0(v *Value) bool {
func rewriteValueMIPS64_OpMIPS64MOVVload_0(v *Value) bool {
// match: (MOVVload [off1] {sym} (ADDVconst [off2] ptr) mem)
// cond: is32Bit(off1+off2)
// result: (MOVVload [off1+off2] {sym} ptr mem)
for {
off1 := v.AuxInt
sym := v.Aux
@@ -5304,7 +5304,7 @@ func rewriteValueMIPS64_OpMIPS64MOVWUreg_0(v *Value) bool {
func rewriteValueMIPS64_OpMIPS64MOVWload_0(v *Value) bool {
// match: (MOVWload [off1] {sym} (ADDVconst [off2] ptr) mem)
// cond: is32Bit(off1+off2)
// result: (MOVWload [off1+off2] {sym} ptr mem)
for {
off1 := v.AuxInt
sym := v.Aux
@@ -5749,7 +5749,7 @@ func rewriteValueMIPS64_OpMIPS64NORconst_0(v *Value) bool {
func rewriteValueMIPS64_OpMIPS64OR_0(v *Value) bool {
// match: (OR x (MOVVconst [c]))
// cond: is32Bit(c)
// result: (ORconst [c] x)
for {
_ = v.Args[1]
x := v.Args[0]
@@ -5768,7 +5768,7 @@ func rewriteValueMIPS64_OpMIPS64OR_0(v *Value) bool {
}
// match: (OR (MOVVconst [c]) x)
// cond: is32Bit(c)
// result: (ORconst [c] x)
for {
_ = v.Args[1]
v_0 := v.Args[0]
@@ -5864,7 +5864,7 @@ func rewriteValueMIPS64_OpMIPS64ORconst_0(v *Value) bool {
func rewriteValueMIPS64_OpMIPS64SGT_0(v *Value) bool {
// match: (SGT (MOVVconst [c]) x)
// cond: is32Bit(c)
// result: (SGTconst [c] x)
for {
_ = v.Args[1]
v_0 := v.Args[0]
@@ -6872,7 +6872,7 @@ func rewriteValueMIPS64_OpMove_0(v *Value) bool {
}
// match: (Move [2] dst src mem)
// cond:
// result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
for {
if v.AuxInt != 2 {
break
@@ -6925,7 +6925,7 @@ func rewriteValueMIPS64_OpMove_0(v *Value) bool {
}
// match: (Move [4] {t} dst src mem)
// cond: t.(*types.Type).Alignment()%2 == 0
// result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
for {
if v.AuxInt != 4 {
break
@@ -6958,7 +6958,7 @@ func rewriteValueMIPS64_OpMove_0(v *Value) bool {
}
// match: (Move [4] dst src mem)
// cond:
// result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
for {
if v.AuxInt != 4 {
break
@@ -7029,7 +7029,7 @@ func rewriteValueMIPS64_OpMove_0(v *Value) bool {
}
// match: (Move [8] {t} dst src mem)
// cond: t.(*types.Type).Alignment()%4 == 0
// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
for {
if v.AuxInt != 8 {
break
@@ -7062,7 +7062,7 @@ func rewriteValueMIPS64_OpMove_0(v *Value) bool {
}
// match: (Move [8] {t} dst src mem)
// cond: t.(*types.Type).Alignment()%2 == 0
// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
for {
if v.AuxInt != 8 {
break
@@ -7122,7 +7122,7 @@ func rewriteValueMIPS64_OpMove_10(v *Value) bool {
_ = typ
// match: (Move [3] dst src mem)
// cond:
// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
for {
if v.AuxInt != 3 {
break
@@ -7160,7 +7160,7 @@ func rewriteValueMIPS64_OpMove_10(v *Value) bool {
}
// match: (Move [6] {t} dst src mem)
// cond: t.(*types.Type).Alignment()%2 == 0
// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
for {
if v.AuxInt != 6 {
break
@@ -7202,7 +7202,7 @@ func rewriteValueMIPS64_OpMove_10(v *Value) bool {
}
// match: (Move [12] {t} dst src mem)
// cond: t.(*types.Type).Alignment()%4 == 0
// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
for {
if v.AuxInt != 12 {
break
@@ -7244,7 +7244,7 @@ func rewriteValueMIPS64_OpMove_10(v *Value) bool {
}
// match: (Move [16] {t} dst src mem)
// cond: t.(*types.Type).Alignment()%8 == 0
// result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
for {
if v.AuxInt != 16 {
break
@@ -7277,7 +7277,7 @@ func rewriteValueMIPS64_OpMove_10(v *Value) bool {
}
// match: (Move [24] {t} dst src mem)
// cond: t.(*types.Type).Alignment()%8 == 0
// result: (MOVVstore [16] dst (MOVVload [16] src mem) (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem)))
for {
if v.AuxInt != 24 {
break
@@ -7319,7 +7319,7 @@ func rewriteValueMIPS64_OpMove_10(v *Value) bool {
}
// match: (Move [s] {t} dst src mem)
// cond: s > 24 || t.(*types.Type).Alignment()%8 != 0
// result: (LoweredMove [t.(*types.Type).Alignment()] dst src (ADDVconst <src.Type> src [s-moveSize(t.(*types.Type).Alignment(), config)]) mem)
for {
s := v.AuxInt
t := v.Aux
@@ -7949,7 +7949,7 @@ func rewriteValueMIPS64_OpRsh16Ux8_0(v *Value) bool {
_ = typ
// match: (Rsh16Ux8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -8085,7 +8085,7 @@ func rewriteValueMIPS64_OpRsh16x8_0(v *Value) bool {
_ = typ
// match: (Rsh16x8 <t> x y)
// cond:
// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -8221,7 +8221,7 @@ func rewriteValueMIPS64_OpRsh32Ux8_0(v *Value) bool {
_ = typ
// match: (Rsh32Ux8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -8357,7 +8357,7 @@ func rewriteValueMIPS64_OpRsh32x8_0(v *Value) bool {
_ = typ
// match: (Rsh32x8 <t> x y)
// cond:
// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -8487,7 +8487,7 @@ func rewriteValueMIPS64_OpRsh64Ux8_0(v *Value) bool {
_ = typ
// match: (Rsh64Ux8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -8615,7 +8615,7 @@ func rewriteValueMIPS64_OpRsh64x8_0(v *Value) bool {
_ = typ
// match: (Rsh64x8 <t> x y)
// cond:
// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -8749,7 +8749,7 @@ func rewriteValueMIPS64_OpRsh8Ux8_0(v *Value) bool {
_ = typ
// match: (Rsh8Ux8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -8885,7 +8885,7 @@ func rewriteValueMIPS64_OpRsh8x8_0(v *Value) bool {
_ = typ
// match: (Rsh8x8 <t> x y)
// cond:
// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -10009,7 +10009,7 @@ func rewriteValueMIPS64_OpZero_0(v *Value) bool {
}
// match: (Zero [2] ptr mem)
// cond:
// result: (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))
for {
if v.AuxInt != 2 {
break
@@ -10057,7 +10057,7 @@ func rewriteValueMIPS64_OpZero_0(v *Value) bool {
}
// match: (Zero [4] {t} ptr mem)
// cond: t.(*types.Type).Alignment()%2 == 0
// result: (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))
for {
if v.AuxInt != 4 {
break
@@ -10087,7 +10087,7 @@ func rewriteValueMIPS64_OpZero_0(v *Value) bool {
}
// match: (Zero [4] ptr mem)
// cond:
// result: (MOVBstore [3] ptr (MOVVconst [0]) (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))))
for {
if v.AuxInt != 4 {
break
@@ -10149,7 +10149,7 @@ func rewriteValueMIPS64_OpZero_0(v *Value) bool {
}
// match: (Zero [8] {t} ptr mem)
// cond: t.(*types.Type).Alignment()%4 == 0
// result: (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem))
for {
if v.AuxInt != 8 {
break
@@ -10179,7 +10179,7 @@ func rewriteValueMIPS64_OpZero_0(v *Value) bool {
}
// match: (Zero [8] {t} ptr mem)
// cond: t.(*types.Type).Alignment()%2 == 0
// result: (MOVHstore [6] ptr (MOVVconst [0]) (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))))
for {
if v.AuxInt != 8 {
break
@@ -10232,7 +10232,7 @@ func rewriteValueMIPS64_OpZero_10(v *Value) bool {
_ = typ
// match: (Zero [3] ptr mem)
// cond:
// result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)))
for {
if v.AuxInt != 3 {
break
@@ -10265,7 +10265,7 @@ func rewriteValueMIPS64_OpZero_10(v *Value) bool {
}
// match: (Zero [6] {t} ptr mem)
// cond: t.(*types.Type).Alignment()%2 == 0
// result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)))
for {
if v.AuxInt != 6 {
break
@@ -10302,7 +10302,7 @@ func rewriteValueMIPS64_OpZero_10(v *Value) bool {
}
// match: (Zero [12] {t} ptr mem)
// cond: t.(*types.Type).Alignment()%4 == 0
// result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem)))
for {
if v.AuxInt != 12 {
break
@@ -10339,7 +10339,7 @@ func rewriteValueMIPS64_OpZero_10(v *Value) bool {
}
// match: (Zero [16] {t} ptr mem)
// cond: t.(*types.Type).Alignment()%8 == 0
// result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem))
for {
if v.AuxInt != 16 {
break
@@ -10369,7 +10369,7 @@ func rewriteValueMIPS64_OpZero_10(v *Value) bool {
}
// match: (Zero [24] {t} ptr mem)
// cond: t.(*types.Type).Alignment()%8 == 0
// result: (MOVVstore [16] ptr (MOVVconst [0]) (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem)))
for {
if v.AuxInt != 24 {
break
@@ -10405,7 +10405,7 @@ func rewriteValueMIPS64_OpZero_10(v *Value) bool {
return true
}
// match: (Zero [s] {t} ptr mem)
// cond: s%8 == 0 && s > 24 && s <= 8*128 && t.(*types.Type).Alignment()%8 == 0 && !config.noDuffDevice
// result: (DUFFZERO [8 * (128 - int64(s/8))] ptr mem)
for {
s := v.AuxInt
@@ -10424,7 +10424,7 @@ func rewriteValueMIPS64_OpZero_10(v *Value) bool {
}
// match: (Zero [s] {t} ptr mem)
// cond: (s > 8*128 || config.noDuffDevice) || t.(*types.Type).Alignment()%8 != 0
// result: (LoweredZero [t.(*types.Type).Alignment()] ptr (ADDVconst <ptr.Type> ptr [s-moveSize(t.(*types.Type).Alignment(), config)]) mem)
for {
s := v.AuxInt
t := v.Aux
@@ -10762,7 +10762,7 @@ func rewriteBlockMIPS64(b *Block) bool {
return true
}
// match: (GEZ (MOVVconst [c]) yes no)
// cond: c < 0
// result: (First nil no yes)
for {
v := b.Control
@@ -10781,7 +10781,7 @@ func rewriteBlockMIPS64(b *Block) bool {
}
case BlockMIPS64GTZ:
// match: (GTZ (MOVVconst [c]) yes no)
// cond: c > 0
// result: (First nil yes no)
for {
v := b.Control
@@ -10847,7 +10847,7 @@ func rewriteBlockMIPS64(b *Block) bool {
return true
}
// match: (LEZ (MOVVconst [c]) yes no)
// cond: c > 0
// result: (First nil no yes)
for {
v := b.Control
@@ -10866,7 +10866,7 @@ func rewriteBlockMIPS64(b *Block) bool {
}
case BlockMIPS64LTZ:
// match: (LTZ (MOVVconst [c]) yes no)
// cond: c < 0
// result: (First nil yes no)
for {
v := b.Control
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -123,7 +123,7 @@ func rewriteValuedec_OpLoad_0(v *Value) bool {
_ = typ
// match: (Load <t> ptr mem)
// cond: t.IsComplex() && t.Size() == 8
// result: (ComplexMake (Load <typ.Float32> ptr mem) (Load <typ.Float32> (OffPtr <typ.Float32Ptr> [4] ptr) mem) )
for {
t := v.Type
_ = v.Args[1]
@@ -148,7 +148,7 @@ func rewriteValuedec_OpLoad_0(v *Value) bool {
}
// match: (Load <t> ptr mem)
// cond: t.IsComplex() && t.Size() == 16
// result: (ComplexMake (Load <typ.Float64> ptr mem) (Load <typ.Float64> (OffPtr <typ.Float64Ptr> [8] ptr) mem) )
for {
t := v.Type
_ = v.Args[1]
@@ -173,7 +173,7 @@ func rewriteValuedec_OpLoad_0(v *Value) bool {
}
// match: (Load <t> ptr mem)
// cond: t.IsString()
// result: (StringMake (Load <typ.BytePtr> ptr mem) (Load <typ.Int> (OffPtr <typ.IntPtr> [config.PtrSize] ptr) mem))
for {
t := v.Type
_ = v.Args[1]
@@ -198,7 +198,7 @@ func rewriteValuedec_OpLoad_0(v *Value) bool {
}
// match: (Load <t> ptr mem)
// cond: t.IsSlice()
// result: (SliceMake (Load <t.ElemType().PtrTo()> ptr mem) (Load <typ.Int> (OffPtr <typ.IntPtr> [config.PtrSize] ptr) mem) (Load <typ.Int> (OffPtr <typ.IntPtr> [2*config.PtrSize] ptr) mem))
for {
t := v.Type
_ = v.Args[1]
@@ -230,7 +230,7 @@ func rewriteValuedec_OpLoad_0(v *Value) bool {
}
// match: (Load <t> ptr mem)
// cond: t.IsInterface()
// result: (IMake (Load <typ.BytePtr> ptr mem) (Load <typ.BytePtr> (OffPtr <typ.BytePtrPtr> [config.PtrSize] ptr) mem))
for {
t := v.Type
_ = v.Args[1]
@@ -318,7 +318,7 @@ func rewriteValuedec_OpStore_0(v *Value) bool {
_ = typ
// match: (Store {t} dst (ComplexMake real imag) mem)
// cond: t.(*types.Type).Size() == 8
// result: (Store {typ.Float32} (OffPtr <typ.Float32Ptr> [4] dst) imag (Store {typ.Float32} dst real mem))
for {
t := v.Aux
_ = v.Args[2]
@@ -351,7 +351,7 @@ func rewriteValuedec_OpStore_0(v *Value) bool {
}
// match: (Store {t} dst (ComplexMake real imag) mem)
// cond: t.(*types.Type).Size() == 16
// result: (Store {typ.Float64} (OffPtr <typ.Float64Ptr> [8] dst) imag (Store {typ.Float64} dst real mem))
for {
t := v.Aux
_ = v.Args[2]
@@ -384,7 +384,7 @@ func rewriteValuedec_OpStore_0(v *Value) bool {
}
// match: (Store dst (StringMake ptr len) mem)
// cond:
// result: (Store {typ.Int} (OffPtr <typ.IntPtr> [config.PtrSize] dst) len (Store {typ.BytePtr} dst ptr mem))
for {
_ = v.Args[2]
dst := v.Args[0]
@@ -413,7 +413,7 @@ func rewriteValuedec_OpStore_0(v *Value) bool {
}
// match: (Store dst (SliceMake ptr len cap) mem)
// cond:
// result: (Store {typ.Int} (OffPtr <typ.IntPtr> [2*config.PtrSize] dst) cap (Store {typ.Int} (OffPtr <typ.IntPtr> [config.PtrSize] dst) len (Store {typ.BytePtr} dst ptr mem)))
for {
_ = v.Args[2]
dst := v.Args[0]
@@ -451,7 +451,7 @@ func rewriteValuedec_OpStore_0(v *Value) bool {
}
// match: (Store dst (IMake itab data) mem)
// cond:
// result: (Store {typ.BytePtr} (OffPtr <typ.BytePtrPtr> [config.PtrSize] dst) data (Store {typ.Uintptr} dst itab mem))
for {
_ = v.Args[2]
dst := v.Args[0]
@ -139,7 +139,7 @@ func rewriteValuedec64_OpAdd64_0(v *Value) bool {
|
|||
_ = typ
|
||||
// match: (Add64 x y)
|
||||
// cond:
|
||||
// result: (Int64Make (Add32withcarry <typ.Int32> (Int64Hi x) (Int64Hi y) (Select1 <types.TypeFlags> (Add32carry (Int64Lo x) (Int64Lo y)))) (Select0 <typ.UInt32> (Add32carry (Int64Lo x) (Int64Lo y))))
|
||||
// result: (Int64Make (Add32withcarry <typ.Int32> (Int64Hi x) (Int64Hi y) (Select1 <types.TypeFlags> (Add32carry (Int64Lo x) (Int64Lo y)))) (Select0 <typ.UInt32> (Add32carry (Int64Lo x) (Int64Lo y))))
|
||||
for {
|
||||
_ = v.Args[1]
|
||||
x := v.Args[0]
|
||||
|
|
@ -183,7 +183,7 @@ func rewriteValuedec64_OpAnd64_0(v *Value) bool {
|
|||
_ = typ
|
||||
// match: (And64 x y)
|
||||
// cond:
|
||||
// result: (Int64Make (And32 <typ.UInt32> (Int64Hi x) (Int64Hi y)) (And32 <typ.UInt32> (Int64Lo x) (Int64Lo y)))
|
||||
// result: (Int64Make (And32 <typ.UInt32> (Int64Hi x) (Int64Hi y)) (And32 <typ.UInt32> (Int64Lo x) (Int64Lo y)))
|
||||
for {
|
||||
_ = v.Args[1]
|
||||
x := v.Args[0]
|
||||
|
|
@@ -217,7 +217,7 @@ func rewriteValuedec64_OpArg_0(v *Value) bool {
_ = typ
// match: (Arg {n} [off])
// cond: is64BitInt(v.Type) && !config.BigEndian && v.Type.IsSigned()
// result: (Int64Make (Arg <typ.Int32> {n} [off+4]) (Arg <typ.UInt32> {n} [off]))
// result: (Int64Make (Arg <typ.Int32> {n} [off+4]) (Arg <typ.UInt32> {n} [off]))
for {
off := v.AuxInt
n := v.Aux
@@ -237,7 +237,7 @@ func rewriteValuedec64_OpArg_0(v *Value) bool {
}
// match: (Arg {n} [off])
// cond: is64BitInt(v.Type) && !config.BigEndian && !v.Type.IsSigned()
// result: (Int64Make (Arg <typ.UInt32> {n} [off+4]) (Arg <typ.UInt32> {n} [off]))
// result: (Int64Make (Arg <typ.UInt32> {n} [off+4]) (Arg <typ.UInt32> {n} [off]))
for {
off := v.AuxInt
n := v.Aux
@@ -257,7 +257,7 @@ func rewriteValuedec64_OpArg_0(v *Value) bool {
}
// match: (Arg {n} [off])
// cond: is64BitInt(v.Type) && config.BigEndian && v.Type.IsSigned()
// result: (Int64Make (Arg <typ.Int32> {n} [off]) (Arg <typ.UInt32> {n} [off+4]))
// result: (Int64Make (Arg <typ.Int32> {n} [off]) (Arg <typ.UInt32> {n} [off+4]))
for {
off := v.AuxInt
n := v.Aux
@@ -277,7 +277,7 @@ func rewriteValuedec64_OpArg_0(v *Value) bool {
}
// match: (Arg {n} [off])
// cond: is64BitInt(v.Type) && config.BigEndian && !v.Type.IsSigned()
// result: (Int64Make (Arg <typ.UInt32> {n} [off]) (Arg <typ.UInt32> {n} [off+4]))
// result: (Int64Make (Arg <typ.UInt32> {n} [off]) (Arg <typ.UInt32> {n} [off+4]))
for {
off := v.AuxInt
n := v.Aux
@@ -304,7 +304,7 @@ func rewriteValuedec64_OpBitLen64_0(v *Value) bool {
_ = typ
// match: (BitLen64 x)
// cond:
// result: (Add32 <typ.Int> (BitLen32 <typ.Int> (Int64Hi x)) (BitLen32 <typ.Int> (Or32 <typ.UInt32> (Int64Lo x) (Zeromask (Int64Hi x)))))
// result: (Add32 <typ.Int> (BitLen32 <typ.Int> (Int64Hi x)) (BitLen32 <typ.Int> (Or32 <typ.UInt32> (Int64Lo x) (Zeromask (Int64Hi x)))))
for {
x := v.Args[0]
v.reset(OpAdd32)
@@ -336,7 +336,7 @@ func rewriteValuedec64_OpBswap64_0(v *Value) bool {
_ = typ
// match: (Bswap64 x)
// cond:
// result: (Int64Make (Bswap32 <typ.UInt32> (Int64Lo x)) (Bswap32 <typ.UInt32> (Int64Hi x)))
// result: (Int64Make (Bswap32 <typ.UInt32> (Int64Lo x)) (Bswap32 <typ.UInt32> (Int64Hi x)))
for {
x := v.Args[0]
v.reset(OpInt64Make)
@@ -360,7 +360,7 @@ func rewriteValuedec64_OpCom64_0(v *Value) bool {
_ = typ
// match: (Com64 x)
// cond:
// result: (Int64Make (Com32 <typ.UInt32> (Int64Hi x)) (Com32 <typ.UInt32> (Int64Lo x)))
// result: (Int64Make (Com32 <typ.UInt32> (Int64Hi x)) (Com32 <typ.UInt32> (Int64Lo x)))
for {
x := v.Args[0]
v.reset(OpInt64Make)
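BitLen64, Bswap64 and Com64 above all reduce to per-half 32-bit operations. A hedged Go sketch of what the BitLen64 and Bswap64 results compute (function names invented; uses math/bits):

func bitLen64(hi, lo uint32) int {
	// The rule ORs lo with Zeromask(hi) so the low half contributes a full 32 bits
	// whenever hi is nonzero; branching expresses the same thing.
	if hi != 0 {
		return 32 + bits.Len32(hi)
	}
	return bits.Len32(lo)
}

func bswap64(hi, lo uint32) (newHi, newLo uint32) {
	// Byte-swapping a 64-bit value swaps the halves and byte-reverses each one,
	// matching (Int64Make (Bswap32 lo) (Bswap32 hi)).
	return bits.ReverseBytes32(lo), bits.ReverseBytes32(hi)
}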
@@ -427,7 +427,7 @@ func rewriteValuedec64_OpCtz64_0(v *Value) bool {
_ = typ
// match: (Ctz64 x)
// cond:
// result: (Add32 <typ.UInt32> (Ctz32 <typ.UInt32> (Int64Lo x)) (And32 <typ.UInt32> (Com32 <typ.UInt32> (Zeromask (Int64Lo x))) (Ctz32 <typ.UInt32> (Int64Hi x))))
// result: (Add32 <typ.UInt32> (Ctz32 <typ.UInt32> (Int64Lo x)) (And32 <typ.UInt32> (Com32 <typ.UInt32> (Zeromask (Int64Lo x))) (Ctz32 <typ.UInt32> (Int64Hi x))))
for {
x := v.Args[0]
v.reset(OpAdd32)
@@ -461,7 +461,7 @@ func rewriteValuedec64_OpEq64_0(v *Value) bool {
_ = typ
// match: (Eq64 x y)
// cond:
// result: (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Eq32 (Int64Lo x) (Int64Lo y)))
// result: (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Eq32 (Int64Lo x) (Int64Lo y)))
for {
_ = v.Args[1]
x := v.Args[0]
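Ctz64 and Eq64 follow the same half-splitting pattern: the high half only contributes trailing zeros when the low half is zero, and equality requires both halves to match. A plain-Go sketch under those assumptions (names invented; uses math/bits):

func ctz64(hi, lo uint32) int {
	// Matches (Add32 (Ctz32 lo) (And32 (Com32 (Zeromask lo)) (Ctz32 hi))):
	// the mask zeroes the high-half term unless lo == 0.
	if lo != 0 {
		return bits.TrailingZeros32(lo)
	}
	return 32 + bits.TrailingZeros32(hi)
}

func eq64(xhi, xlo, yhi, ylo uint32) bool {
	return xhi == yhi && xlo == ylo // (AndB (Eq32 hi...) (Eq32 lo...))
}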
@@ -493,7 +493,7 @@ func rewriteValuedec64_OpGeq64_0(v *Value) bool {
_ = typ
// match: (Geq64 x y)
// cond:
// result: (OrB (Greater32 (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Geq32U (Int64Lo x) (Int64Lo y))))
// result: (OrB (Greater32 (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Geq32U (Int64Lo x) (Int64Lo y))))
for {
_ = v.Args[1]
x := v.Args[0]
@@ -535,7 +535,7 @@ func rewriteValuedec64_OpGeq64U_0(v *Value) bool {
_ = typ
// match: (Geq64U x y)
// cond:
// result: (OrB (Greater32U (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Geq32U (Int64Lo x) (Int64Lo y))))
// result: (OrB (Greater32U (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Geq32U (Int64Lo x) (Int64Lo y))))
for {
_ = v.Args[1]
x := v.Args[0]
@@ -577,7 +577,7 @@ func rewriteValuedec64_OpGreater64_0(v *Value) bool {
_ = typ
// match: (Greater64 x y)
// cond:
// result: (OrB (Greater32 (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Greater32U (Int64Lo x) (Int64Lo y))))
// result: (OrB (Greater32 (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Greater32U (Int64Lo x) (Int64Lo y))))
for {
_ = v.Args[1]
x := v.Args[0]
@@ -619,7 +619,7 @@ func rewriteValuedec64_OpGreater64U_0(v *Value) bool {
_ = typ
// match: (Greater64U x y)
// cond:
// result: (OrB (Greater32U (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Greater32U (Int64Lo x) (Int64Lo y))))
// result: (OrB (Greater32U (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Greater32U (Int64Lo x) (Int64Lo y))))
for {
_ = v.Args[1]
x := v.Args[0]
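The Geq64/Greater64 rules compare the high halves first and only consult the low halves (always unsigned) on a tie; the signed variants compare the high halves as signed values. A plain-Go sketch of the pattern (function names invented):

func greater64U(xhi, xlo, yhi, ylo uint32) bool {
	// (OrB (Greater32U hi...) (AndB (Eq32 hi...) (Greater32U lo...)))
	return xhi > yhi || (xhi == yhi && xlo > ylo)
}

func greater64(xhi int32, xlo uint32, yhi int32, ylo uint32) bool {
	// High halves compare signed, low halves compare unsigned.
	return xhi > yhi || (xhi == yhi && xlo > ylo)
}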
@@ -697,7 +697,7 @@ func rewriteValuedec64_OpLeq64_0(v *Value) bool {
_ = typ
// match: (Leq64 x y)
// cond:
// result: (OrB (Less32 (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Leq32U (Int64Lo x) (Int64Lo y))))
// result: (OrB (Less32 (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Leq32U (Int64Lo x) (Int64Lo y))))
for {
_ = v.Args[1]
x := v.Args[0]
@@ -739,7 +739,7 @@ func rewriteValuedec64_OpLeq64U_0(v *Value) bool {
_ = typ
// match: (Leq64U x y)
// cond:
// result: (OrB (Less32U (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Leq32U (Int64Lo x) (Int64Lo y))))
// result: (OrB (Less32U (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Leq32U (Int64Lo x) (Int64Lo y))))
for {
_ = v.Args[1]
x := v.Args[0]
@@ -781,7 +781,7 @@ func rewriteValuedec64_OpLess64_0(v *Value) bool {
_ = typ
// match: (Less64 x y)
// cond:
// result: (OrB (Less32 (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Less32U (Int64Lo x) (Int64Lo y))))
// result: (OrB (Less32 (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Less32U (Int64Lo x) (Int64Lo y))))
for {
_ = v.Args[1]
x := v.Args[0]
@@ -823,7 +823,7 @@ func rewriteValuedec64_OpLess64U_0(v *Value) bool {
_ = typ
// match: (Less64U x y)
// cond:
// result: (OrB (Less32U (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Less32U (Int64Lo x) (Int64Lo y))))
// result: (OrB (Less32U (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Less32U (Int64Lo x) (Int64Lo y))))
for {
_ = v.Args[1]
x := v.Args[0]
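Leq64/Less64 are the mirror image of the rules above; again only the tie case consults the low halves. A short sketch of the two flavours (names invented):

func less64U(xhi, xlo, yhi, ylo uint32) bool {
	return xhi < yhi || (xhi == yhi && xlo < ylo)
}

func leq64(xhi int32, xlo uint32, yhi int32, ylo uint32) bool {
	// (OrB (Less32 hi...) (AndB (Eq32 hi...) (Leq32U lo...)))
	return xhi < yhi || (xhi == yhi && xlo <= ylo)
}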
@@ -867,7 +867,7 @@ func rewriteValuedec64_OpLoad_0(v *Value) bool {
_ = typ
// match: (Load <t> ptr mem)
// cond: is64BitInt(t) && !config.BigEndian && t.IsSigned()
// result: (Int64Make (Load <typ.Int32> (OffPtr <typ.Int32Ptr> [4] ptr) mem) (Load <typ.UInt32> ptr mem))
// result: (Int64Make (Load <typ.Int32> (OffPtr <typ.Int32Ptr> [4] ptr) mem) (Load <typ.UInt32> ptr mem))
for {
t := v.Type
_ = v.Args[1]
@@ -892,7 +892,7 @@ func rewriteValuedec64_OpLoad_0(v *Value) bool {
}
// match: (Load <t> ptr mem)
// cond: is64BitInt(t) && !config.BigEndian && !t.IsSigned()
// result: (Int64Make (Load <typ.UInt32> (OffPtr <typ.UInt32Ptr> [4] ptr) mem) (Load <typ.UInt32> ptr mem))
// result: (Int64Make (Load <typ.UInt32> (OffPtr <typ.UInt32Ptr> [4] ptr) mem) (Load <typ.UInt32> ptr mem))
for {
t := v.Type
_ = v.Args[1]
@@ -917,7 +917,7 @@ func rewriteValuedec64_OpLoad_0(v *Value) bool {
}
// match: (Load <t> ptr mem)
// cond: is64BitInt(t) && config.BigEndian && t.IsSigned()
// result: (Int64Make (Load <typ.Int32> ptr mem) (Load <typ.UInt32> (OffPtr <typ.UInt32Ptr> [4] ptr) mem))
// result: (Int64Make (Load <typ.Int32> ptr mem) (Load <typ.UInt32> (OffPtr <typ.UInt32Ptr> [4] ptr) mem))
for {
t := v.Type
_ = v.Args[1]
@@ -942,7 +942,7 @@ func rewriteValuedec64_OpLoad_0(v *Value) bool {
}
// match: (Load <t> ptr mem)
// cond: is64BitInt(t) && config.BigEndian && !t.IsSigned()
// result: (Int64Make (Load <typ.UInt32> ptr mem) (Load <typ.UInt32> (OffPtr <typ.UInt32Ptr> [4] ptr) mem))
// result: (Int64Make (Load <typ.UInt32> ptr mem) (Load <typ.UInt32> (OffPtr <typ.UInt32Ptr> [4] ptr) mem))
for {
t := v.Type
_ = v.Args[1]
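The four Load rules differ only in signedness and byte order: the 64-bit value is assembled from two 32-bit loads at offsets 0 and 4, with the high half at the lower address on big-endian targets. A sketch using encoding/binary (illustrative only, not the compiler's code):

func load64(b []byte, bigEndian bool) (hi, lo uint32) {
	if bigEndian {
		// high half at offset 0, low half at offset 4
		return binary.BigEndian.Uint32(b[0:4]), binary.BigEndian.Uint32(b[4:8])
	}
	// little endian: low half at offset 0, high half at offset 4
	return binary.LittleEndian.Uint32(b[4:8]), binary.LittleEndian.Uint32(b[0:4])
}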
@@ -1132,7 +1132,7 @@ func rewriteValuedec64_OpLsh64x16_0(v *Value) bool {
_ = typ
// match: (Lsh64x16 (Int64Make hi lo) s)
// cond:
// result: (Int64Make (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Lsh32x16 <typ.UInt32> hi s) (Rsh32Ux16 <typ.UInt32> lo (Sub16 <typ.UInt16> (Const16 <typ.UInt16> [32]) s))) (Lsh32x16 <typ.UInt32> lo (Sub16 <typ.UInt16> s (Const16 <typ.UInt16> [32])))) (Lsh32x16 <typ.UInt32> lo s))
// result: (Int64Make (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Lsh32x16 <typ.UInt32> hi s) (Rsh32Ux16 <typ.UInt32> lo (Sub16 <typ.UInt16> (Const16 <typ.UInt16> [32]) s))) (Lsh32x16 <typ.UInt32> lo (Sub16 <typ.UInt16> s (Const16 <typ.UInt16> [32])))) (Lsh32x16 <typ.UInt32> lo s))
for {
_ = v.Args[1]
v_0 := v.Args[0]
@@ -1185,7 +1185,7 @@ func rewriteValuedec64_OpLsh64x32_0(v *Value) bool {
_ = typ
// match: (Lsh64x32 (Int64Make hi lo) s)
// cond:
// result: (Int64Make (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Lsh32x32 <typ.UInt32> hi s) (Rsh32Ux32 <typ.UInt32> lo (Sub32 <typ.UInt32> (Const32 <typ.UInt32> [32]) s))) (Lsh32x32 <typ.UInt32> lo (Sub32 <typ.UInt32> s (Const32 <typ.UInt32> [32])))) (Lsh32x32 <typ.UInt32> lo s))
// result: (Int64Make (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Lsh32x32 <typ.UInt32> hi s) (Rsh32Ux32 <typ.UInt32> lo (Sub32 <typ.UInt32> (Const32 <typ.UInt32> [32]) s))) (Lsh32x32 <typ.UInt32> lo (Sub32 <typ.UInt32> s (Const32 <typ.UInt32> [32])))) (Lsh32x32 <typ.UInt32> lo s))
for {
_ = v.Args[1]
v_0 := v.Args[0]
@@ -1317,7 +1317,7 @@ func rewriteValuedec64_OpLsh64x8_0(v *Value) bool {
_ = typ
// match: (Lsh64x8 (Int64Make hi lo) s)
// cond:
// result: (Int64Make (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Lsh32x8 <typ.UInt32> hi s) (Rsh32Ux8 <typ.UInt32> lo (Sub8 <typ.UInt8> (Const8 <typ.UInt8> [32]) s))) (Lsh32x8 <typ.UInt32> lo (Sub8 <typ.UInt8> s (Const8 <typ.UInt8> [32])))) (Lsh32x8 <typ.UInt32> lo s))
// result: (Int64Make (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Lsh32x8 <typ.UInt32> hi s) (Rsh32Ux8 <typ.UInt32> lo (Sub8 <typ.UInt8> (Const8 <typ.UInt8> [32]) s))) (Lsh32x8 <typ.UInt32> lo (Sub8 <typ.UInt8> s (Const8 <typ.UInt8> [32])))) (Lsh32x8 <typ.UInt32> lo s))
for {
_ = v.Args[1]
v_0 := v.Args[0]
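The Lsh64 rules OR together three 32-bit shifts; because a 32-bit shift by 32 or more yields zero, the terms that do not apply for a given shift amount simply vanish. A branch-free Go sketch of the same identity (function name invented; it relies on Go's rule that over-wide unsigned shift counts produce zero):

func lsh64(hi, lo uint32, s uint) (rhi, rlo uint32) {
	// hi<<s covers s < 32, lo>>(32-s) brings low bits into the high word,
	// and lo<<(s-32) covers shift amounts of 32 and above (valid for s < 64).
	rhi = hi<<s | lo>>(32-s) | lo<<(s-32)
	rlo = lo << s
	return rhi, rlo
}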
@@ -1449,7 +1449,7 @@ func rewriteValuedec64_OpMul64_0(v *Value) bool {
_ = typ
// match: (Mul64 x y)
// cond:
// result: (Int64Make (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Int64Lo x) (Int64Hi y)) (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Int64Hi x) (Int64Lo y)) (Select0 <typ.UInt32> (Mul32uhilo (Int64Lo x) (Int64Lo y))))) (Select1 <typ.UInt32> (Mul32uhilo (Int64Lo x) (Int64Lo y))))
// result: (Int64Make (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Int64Lo x) (Int64Hi y)) (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Int64Hi x) (Int64Lo y)) (Select0 <typ.UInt32> (Mul32uhilo (Int64Lo x) (Int64Lo y))))) (Select1 <typ.UInt32> (Mul32uhilo (Int64Lo x) (Int64Lo y))))
for {
_ = v.Args[1]
x := v.Args[0]
@@ -1522,7 +1522,7 @@ func rewriteValuedec64_OpNeq64_0(v *Value) bool {
_ = typ
// match: (Neq64 x y)
// cond:
// result: (OrB (Neq32 (Int64Hi x) (Int64Hi y)) (Neq32 (Int64Lo x) (Int64Lo y)))
// result: (OrB (Neq32 (Int64Hi x) (Int64Hi y)) (Neq32 (Int64Lo x) (Int64Lo y)))
for {
_ = v.Args[1]
x := v.Args[0]
@@ -1554,7 +1554,7 @@ func rewriteValuedec64_OpOr64_0(v *Value) bool {
_ = typ
// match: (Or64 x y)
// cond:
// result: (Int64Make (Or32 <typ.UInt32> (Int64Hi x) (Int64Hi y)) (Or32 <typ.UInt32> (Int64Lo x) (Int64Lo y)))
// result: (Int64Make (Or32 <typ.UInt32> (Int64Hi x) (Int64Hi y)) (Or32 <typ.UInt32> (Int64Lo x) (Int64Lo y)))
for {
_ = v.Args[1]
x := v.Args[0]
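Mul64 keeps only the low 64 bits of the product: the full 32x32 product of the low halves plus the two cross terms folded into the high word, everything wrapping mod 2^32. A sketch with math/bits (function name invented):

func mul64(xhi, xlo, yhi, ylo uint32) (hi, lo uint32) {
	high, low := bits.Mul32(xlo, ylo) // Mul32uhilo: full 64-bit product of the low halves
	hi = xlo*yhi + xhi*ylo + high     // cross terms wrap mod 2^32, exactly as the rule does
	return hi, low
}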
@@ -1906,7 +1906,7 @@ func rewriteValuedec64_OpRsh64Ux16_0(v *Value) bool {
_ = typ
// match: (Rsh64Ux16 (Int64Make hi lo) s)
// cond:
// result: (Int64Make (Rsh32Ux16 <typ.UInt32> hi s) (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Rsh32Ux16 <typ.UInt32> lo s) (Lsh32x16 <typ.UInt32> hi (Sub16 <typ.UInt16> (Const16 <typ.UInt16> [32]) s))) (Rsh32Ux16 <typ.UInt32> hi (Sub16 <typ.UInt16> s (Const16 <typ.UInt16> [32])))))
// result: (Int64Make (Rsh32Ux16 <typ.UInt32> hi s) (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Rsh32Ux16 <typ.UInt32> lo s) (Lsh32x16 <typ.UInt32> hi (Sub16 <typ.UInt16> (Const16 <typ.UInt16> [32]) s))) (Rsh32Ux16 <typ.UInt32> hi (Sub16 <typ.UInt16> s (Const16 <typ.UInt16> [32])))))
for {
_ = v.Args[1]
v_0 := v.Args[0]
@@ -1959,7 +1959,7 @@ func rewriteValuedec64_OpRsh64Ux32_0(v *Value) bool {
_ = typ
// match: (Rsh64Ux32 (Int64Make hi lo) s)
// cond:
// result: (Int64Make (Rsh32Ux32 <typ.UInt32> hi s) (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Rsh32Ux32 <typ.UInt32> lo s) (Lsh32x32 <typ.UInt32> hi (Sub32 <typ.UInt32> (Const32 <typ.UInt32> [32]) s))) (Rsh32Ux32 <typ.UInt32> hi (Sub32 <typ.UInt32> s (Const32 <typ.UInt32> [32])))))
// result: (Int64Make (Rsh32Ux32 <typ.UInt32> hi s) (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Rsh32Ux32 <typ.UInt32> lo s) (Lsh32x32 <typ.UInt32> hi (Sub32 <typ.UInt32> (Const32 <typ.UInt32> [32]) s))) (Rsh32Ux32 <typ.UInt32> hi (Sub32 <typ.UInt32> s (Const32 <typ.UInt32> [32])))))
for {
_ = v.Args[1]
v_0 := v.Args[0]
@@ -2091,7 +2091,7 @@ func rewriteValuedec64_OpRsh64Ux8_0(v *Value) bool {
_ = typ
// match: (Rsh64Ux8 (Int64Make hi lo) s)
// cond:
// result: (Int64Make (Rsh32Ux8 <typ.UInt32> hi s) (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Rsh32Ux8 <typ.UInt32> lo s) (Lsh32x8 <typ.UInt32> hi (Sub8 <typ.UInt8> (Const8 <typ.UInt8> [32]) s))) (Rsh32Ux8 <typ.UInt32> hi (Sub8 <typ.UInt8> s (Const8 <typ.UInt8> [32])))))
// result: (Int64Make (Rsh32Ux8 <typ.UInt32> hi s) (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Rsh32Ux8 <typ.UInt32> lo s) (Lsh32x8 <typ.UInt32> hi (Sub8 <typ.UInt8> (Const8 <typ.UInt8> [32]) s))) (Rsh32Ux8 <typ.UInt32> hi (Sub8 <typ.UInt8> s (Const8 <typ.UInt8> [32])))))
for {
_ = v.Args[1]
v_0 := v.Args[0]
@@ -2144,7 +2144,7 @@ func rewriteValuedec64_OpRsh64x16_0(v *Value) bool {
_ = typ
// match: (Rsh64x16 (Int64Make hi lo) s)
// cond:
// result: (Int64Make (Rsh32x16 <typ.UInt32> hi s) (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Rsh32Ux16 <typ.UInt32> lo s) (Lsh32x16 <typ.UInt32> hi (Sub16 <typ.UInt16> (Const16 <typ.UInt16> [32]) s))) (And32 <typ.UInt32> (Rsh32x16 <typ.UInt32> hi (Sub16 <typ.UInt16> s (Const16 <typ.UInt16> [32]))) (Zeromask (ZeroExt16to32 (Rsh16Ux32 <typ.UInt16> s (Const32 <typ.UInt32> [5])))))))
// result: (Int64Make (Rsh32x16 <typ.UInt32> hi s) (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Rsh32Ux16 <typ.UInt32> lo s) (Lsh32x16 <typ.UInt32> hi (Sub16 <typ.UInt16> (Const16 <typ.UInt16> [32]) s))) (And32 <typ.UInt32> (Rsh32x16 <typ.UInt32> hi (Sub16 <typ.UInt16> s (Const16 <typ.UInt16> [32]))) (Zeromask (ZeroExt16to32 (Rsh16Ux32 <typ.UInt16> s (Const32 <typ.UInt32> [5])))))))
for {
_ = v.Args[1]
v_0 := v.Args[0]
@@ -2209,7 +2209,7 @@ func rewriteValuedec64_OpRsh64x32_0(v *Value) bool {
_ = typ
// match: (Rsh64x32 (Int64Make hi lo) s)
// cond:
// result: (Int64Make (Rsh32x32 <typ.UInt32> hi s) (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Rsh32Ux32 <typ.UInt32> lo s) (Lsh32x32 <typ.UInt32> hi (Sub32 <typ.UInt32> (Const32 <typ.UInt32> [32]) s))) (And32 <typ.UInt32> (Rsh32x32 <typ.UInt32> hi (Sub32 <typ.UInt32> s (Const32 <typ.UInt32> [32]))) (Zeromask (Rsh32Ux32 <typ.UInt32> s (Const32 <typ.UInt32> [5]))))))
// result: (Int64Make (Rsh32x32 <typ.UInt32> hi s) (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Rsh32Ux32 <typ.UInt32> lo s) (Lsh32x32 <typ.UInt32> hi (Sub32 <typ.UInt32> (Const32 <typ.UInt32> [32]) s))) (And32 <typ.UInt32> (Rsh32x32 <typ.UInt32> hi (Sub32 <typ.UInt32> s (Const32 <typ.UInt32> [32]))) (Zeromask (Rsh32Ux32 <typ.UInt32> s (Const32 <typ.UInt32> [5]))))))
for {
_ = v.Args[1]
v_0 := v.Args[0]
@@ -2361,7 +2361,7 @@ func rewriteValuedec64_OpRsh64x8_0(v *Value) bool {
_ = typ
// match: (Rsh64x8 (Int64Make hi lo) s)
// cond:
// result: (Int64Make (Rsh32x8 <typ.UInt32> hi s) (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Rsh32Ux8 <typ.UInt32> lo s) (Lsh32x8 <typ.UInt32> hi (Sub8 <typ.UInt8> (Const8 <typ.UInt8> [32]) s))) (And32 <typ.UInt32> (Rsh32x8 <typ.UInt32> hi (Sub8 <typ.UInt8> s (Const8 <typ.UInt8> [32]))) (Zeromask (ZeroExt8to32 (Rsh8Ux32 <typ.UInt8> s (Const32 <typ.UInt32> [5])))))))
// result: (Int64Make (Rsh32x8 <typ.UInt32> hi s) (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Rsh32Ux8 <typ.UInt32> lo s) (Lsh32x8 <typ.UInt32> hi (Sub8 <typ.UInt8> (Const8 <typ.UInt8> [32]) s))) (And32 <typ.UInt32> (Rsh32x8 <typ.UInt32> hi (Sub8 <typ.UInt8> s (Const8 <typ.UInt8> [32]))) (Zeromask (ZeroExt8to32 (Rsh8Ux32 <typ.UInt8> s (Const32 <typ.UInt32> [5])))))))
for {
_ = v.Args[1]
v_0 := v.Args[0]
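The unsigned Rsh64Ux rules mirror the left-shift composition above, and the signed Rsh64x rules add a sign-fill term guarded by Zeromask so that shifts of 32 or more propagate the sign bit into the low word. A branchy Go sketch of the signed case (hypothetical helper, not the generated code):

func rsh64x(hi int32, lo uint32, s uint) (rhi int32, rlo uint32) {
	switch {
	case s < 32:
		return hi >> s, lo>>s | uint32(hi)<<(32-s)
	case s < 64:
		return hi >> 31, uint32(hi >> (s - 32)) // high word becomes pure sign fill
	default:
		return hi >> 31, uint32(hi >> 31) // only sign bits remain
	}
}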
@@ -2639,7 +2639,7 @@ func rewriteValuedec64_OpStore_0(v *Value) bool {
_ = config
// match: (Store {t} dst (Int64Make hi lo) mem)
// cond: t.(*types.Type).Size() == 8 && !config.BigEndian
// result: (Store {hi.Type} (OffPtr <hi.Type.PtrTo()> [4] dst) hi (Store {lo.Type} dst lo mem))
// result: (Store {hi.Type} (OffPtr <hi.Type.PtrTo()> [4] dst) hi (Store {lo.Type} dst lo mem))
for {
t := v.Aux
_ = v.Args[2]
@@ -2672,7 +2672,7 @@ func rewriteValuedec64_OpStore_0(v *Value) bool {
}
// match: (Store {t} dst (Int64Make hi lo) mem)
// cond: t.(*types.Type).Size() == 8 && config.BigEndian
// result: (Store {lo.Type} (OffPtr <lo.Type.PtrTo()> [4] dst) lo (Store {hi.Type} dst hi mem))
// result: (Store {lo.Type} (OffPtr <lo.Type.PtrTo()> [4] dst) lo (Store {hi.Type} dst hi mem))
for {
t := v.Aux
_ = v.Args[2]
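The two Store rules write the 64-bit value as two 32-bit stores, placing the high half at offset 4 on little-endian targets and at offset 0 on big-endian ones. A sketch with encoding/binary (illustrative; the function name is invented):

func store64(b []byte, hi, lo uint32, bigEndian bool) {
	if bigEndian {
		binary.BigEndian.PutUint32(b[0:4], hi) // high half at the lower address
		binary.BigEndian.PutUint32(b[4:8], lo)
		return
	}
	binary.LittleEndian.PutUint32(b[0:4], lo) // low half at the lower address
	binary.LittleEndian.PutUint32(b[4:8], hi)
}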
@@ -2712,7 +2712,7 @@ func rewriteValuedec64_OpSub64_0(v *Value) bool {
_ = typ
// match: (Sub64 x y)
// cond:
// result: (Int64Make (Sub32withcarry <typ.Int32> (Int64Hi x) (Int64Hi y) (Select1 <types.TypeFlags> (Sub32carry (Int64Lo x) (Int64Lo y)))) (Select0 <typ.UInt32> (Sub32carry (Int64Lo x) (Int64Lo y))))
// result: (Int64Make (Sub32withcarry <typ.Int32> (Int64Hi x) (Int64Hi y) (Select1 <types.TypeFlags> (Sub32carry (Int64Lo x) (Int64Lo y)))) (Select0 <typ.UInt32> (Sub32carry (Int64Lo x) (Int64Lo y))))
for {
_ = v.Args[1]
x := v.Args[0]
@@ -2808,7 +2808,7 @@ func rewriteValuedec64_OpXor64_0(v *Value) bool {
_ = typ
// match: (Xor64 x y)
// cond:
// result: (Int64Make (Xor32 <typ.UInt32> (Int64Hi x) (Int64Hi y)) (Xor32 <typ.UInt32> (Int64Lo x) (Int64Lo y)))
// result: (Int64Make (Xor32 <typ.UInt32> (Int64Hi x) (Int64Hi y)) (Xor32 <typ.UInt32> (Int64Lo x) (Int64Lo y)))
for {
_ = v.Args[1]
x := v.Args[0]
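Sub64 mirrors Add64 with a borrow instead of a carry, and Xor64 simply combines the halves independently. A closing sketch (math/bits; names invented):

func sub64(xhi, xlo, yhi, ylo uint32) (hi, lo uint32) {
	lo, borrow := bits.Sub32(xlo, ylo, 0) // corresponds to Sub32carry
	hi, _ = bits.Sub32(xhi, yhi, borrow)  // corresponds to Sub32withcarry
	return hi, lo
}

func xor64(xhi, xlo, yhi, ylo uint32) (hi, lo uint32) {
	return xhi ^ yhi, xlo ^ ylo
}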
File diff suppressed because it is too large
Load Diff