Commit fb54e030 authored by Keith Randall

[dev.ssa] cmd/compile: small improvements

Found looking at mapaccess1_faststr.

runtime.throw never returns.
Do x+y+c with an LEA.

Change-Id: I27ea6669324242a6302397cbdc73230891d97591
Reviewed-on: https://go-review.googlesource.com/19911
Run-TryBot: Keith Randall <khr@golang.org>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: David Chase <drchase@google.com>
parent a5325761
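
Two separate improvements, both found by reading the compiled code for mapaccess1_faststr: calls to runtime.throw are now known to never return, and an addition of the form x+y+c is now selected as a single LEA instead of two ADDs. A minimal sketch of the LEA case; the function and the assembly are illustrative, not taken from the commit:

	// sum computes x + y + 7. With the new LEAQ1 rules, the whole
	// three-operand addition becomes one scale-1 indexed LEA.
	func sum(x, y int64) int64 {
		return x + y + 7
	}

	// before: ADDQ BX, AX        after: LEAQ 7(AX)(BX*1), AX
	//         ADDQ $7, AX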
@@ -544,8 +544,9 @@ func (s *state) stmt(n *Node) {
 	// Expression statements
 	case OCALLFUNC, OCALLMETH, OCALLINTER:
 		s.call(n, callNormal)
-		if n.Op == OCALLFUNC && n.Left.Op == ONAME && n.Left.Class == PFUNC && n.Left.Sym.Pkg == Runtimepkg &&
-			(n.Left.Sym.Name == "gopanic" || n.Left.Sym.Name == "selectgo") {
+		if n.Op == OCALLFUNC && n.Left.Op == ONAME && n.Left.Class == PFUNC &&
+			(compiling_runtime != 0 && n.Left.Sym.Name == "throw" ||
+				n.Left.Sym.Pkg == Runtimepkg && (n.Left.Sym.Name == "gopanic" || n.Left.Sym.Name == "selectgo")) {
 			m := s.mem()
 			b := s.endBlock()
 			b.Kind = ssa.BlockExit
......
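
For context on the hunk above: gopanic and selectgo were already treated as no-return calls; throw joins them, gated on compiling_runtime != 0 because bare calls to throw only occur inside the runtime package. Ending the block with ssa.BlockExit tells later passes there is no fall-through path to compile. A hedged sketch of the shape of runtime code that benefits (the check and message are made up for illustration):

	// throw never returns, so once the call's block is an Exit block
	// the compiler drops everything after it on this path.
	if h == nil {
		throw("nil map in fast-path access") // block ends here
	}
	// later code on this path may assume h != nil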
@@ -34,6 +34,14 @@ Optimizations (better compiled code)
   flag regeneration.
 - In forms like if ... { call } else { no call }, mark the call branch as unlikely.
 - Non-constant rotate detection.
+- Do 0 <= x && x < n with one unsigned compare (see the sketch after this hunk)
+- nil-check removal in indexed load/store case:
+     lea    (%rdx,%rax,1),%rcx
+     test   %al,(%rcx) // nil check
+     mov    (%rdx,%rax,1),%cl // load to same address
+- any pointer generated by unsafe arithmetic must be non-nil?
+  (Of course that may not be true in general, but it is for all uses
+   in the runtime, and we can play games with unsafe.)
 
 Optimizations (better compiler)
 -------------------------------
......
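
The first new TODO item above is the classic unsigned-compare trick, sketched here for reference (the sketch is not part of the commit): for signed x and non-negative n, 0 <= x && x < n collapses to a single unsigned compare, because a negative x converts to a huge unsigned value.

	// inBounds reports whether 0 <= x && x < n using one compare.
	// When x < 0, uint(x) is at least 1<<63 on 64-bit, which cannot
	// be below uint(n) for any non-negative n.
	func inBounds(x, n int) bool {
		return uint(x) < uint(n)
	}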
@@ -558,6 +558,11 @@
 (ADDQ x (ADDQ x y)) -> (LEAQ2 y x)
 (ADDQ x (ADDQ y x)) -> (LEAQ2 y x)
 
+// combine ADDQ/ADDQconst into LEAQ1
+(ADDQconst [c] (ADDQ x y)) -> (LEAQ1 [c] x y)
+(ADDQ (ADDQconst [c] x) y) -> (LEAQ1 [c] x y)
+(ADDQ x (ADDQconst [c] y)) -> (LEAQ1 [c] x y)
+
 // fold ADDQ into LEAQ
 (ADDQconst [c] (LEAQ [d] {s} x)) -> (LEAQ [c+d] {s} x)
 (LEAQ [c] {s} (ADDQconst [d] x)) -> (LEAQ [c+d] {s} x)
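
For readers new to the rules DSL: (LEAQ1 [c] x y) is a scale-1 indexed LEA computing x + y + c, and the three new rules cover the three ways an ADDQ/ADDQconst pair can nest after other rewrites. A worked reading (the printed x86 form is an assumption, shown only for orientation):

	(ADDQconst [c] (ADDQ x y))   // (x + y) + c
	(ADDQ (ADDQconst [c] x) y)   // (x + c) + y
	(ADDQ x (ADDQconst [c] y))   // x + (y + c)
	// all three compute the same value, so all three become
	(LEAQ1 [c] x y)              // roughly LEAQ c(x)(y*1), dst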
@@ -818,7 +823,6 @@
 (LEAQ [off1] {sym1} (LEAQ8 [off2] {sym2} x y)) && canMergeSym(sym1, sym2) ->
       (LEAQ8 [addOff(off1,off2)] {mergeSym(sym1,sym2)} x y)
-
 
 // lower Zero instructions with word sizes
 (Zero [0] _ mem) -> mem
 (Zero [1] destptr mem) -> (MOVBstoreconst [0] destptr mem)
......
@@ -1075,6 +1075,38 @@ func rewriteValueAMD64_OpAMD64ADDQ(v *Value, config *Config) bool {
 		v.AddArg(x)
 		return true
 	}
+	// match: (ADDQ (ADDQconst [c] x) y)
+	// cond:
+	// result: (LEAQ1 [c] x y)
+	for {
+		if v.Args[0].Op != OpAMD64ADDQconst {
+			break
+		}
+		c := v.Args[0].AuxInt
+		x := v.Args[0].Args[0]
+		y := v.Args[1]
+		v.reset(OpAMD64LEAQ1)
+		v.AuxInt = c
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (ADDQ x (ADDQconst [c] y))
+	// cond:
+	// result: (LEAQ1 [c] x y)
+	for {
+		x := v.Args[0]
+		if v.Args[1].Op != OpAMD64ADDQconst {
+			break
+		}
+		c := v.Args[1].AuxInt
+		y := v.Args[1].Args[0]
+		v.reset(OpAMD64LEAQ1)
+		v.AuxInt = c
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
 	// match: (ADDQ x (LEAQ [c] {s} y))
 	// cond: x.Op != OpSB && y.Op != OpSB
 	// result: (LEAQ1 [c] {s} x y)
@@ -1136,6 +1168,22 @@ func rewriteValueAMD64_OpAMD64ADDQ(v *Value, config *Config) bool {
 func rewriteValueAMD64_OpAMD64ADDQconst(v *Value, config *Config) bool {
 	b := v.Block
 	_ = b
+	// match: (ADDQconst [c] (ADDQ x y))
+	// cond:
+	// result: (LEAQ1 [c] x y)
+	for {
+		c := v.AuxInt
+		if v.Args[0].Op != OpAMD64ADDQ {
+			break
+		}
+		x := v.Args[0].Args[0]
+		y := v.Args[0].Args[1]
+		v.reset(OpAMD64LEAQ1)
+		v.AuxInt = c
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
 	// match: (ADDQconst [c] (LEAQ [d] {s} x))
 	// cond:
 	// result: (LEAQ [c+d] {s} x)
......
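
The rewriteAMD64.go hunks above are machine-generated from the rules file: each rule becomes a block that checks v.Op and the operand ops and, on a match, rewrites v in place via v.reset; the file is not edited by hand (rerunning the generator with go run *.go in the gen directory is an assumption about the workflow of this era, not something stated in the commit). A quick way to see the new selection in action, sketched for a 64-bit x86 build:

	// lea.go: compile with `go tool compile -S lea.go` and look
	// for a single LEAQ in the assembly printed for addr.
	package main

	func addr(x, y uintptr) uintptr {
		return x + y + 8
	}

	func main() {}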