Commit 8c46aa54 authored by Keith Randall

[dev.ssa] cmd/compile/internal/ssa: Handle variables correctly

Use *Node of type ONAME instead of string as the key for variable maps.
This will prevent aliasing between two identically named but
differently scoped variables.

Introduce an Aux value that encodes the offset of a variable
from a base pointer (either global base pointer or stack pointer).

Allow LEAQ and derivatives (MOVQ, etc.) to also have such an Aux field.

Allocate space for AUTO variables in stackalloc.
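The shape of the new scheme, as a rough sketch in the SSA notation used by the
rewrite rules (the names t, gSym, aSym, and nSym are illustrative, not from
this patch; the symbol types are the ones added to value.go):

    // global g:        (Addr {&ExternSymbol{Typ: t, Sym: gSym}} SB)
    //                   offset from SB is not known until link time
    // argument/result: (Addr {&ArgSymbol{Typ: t, Offset: 8, Sym: aSym}} SP)
    //                   offset is relative to FP = SP + framesize
    // local (auto):    (Addr {&AutoSymbol{Typ: t, Offset: -1, Sym: nSym}} SP)
    //                   Offset presumably starts negative; stackalloc assigns it
    //                   (it skips autos whose Offset is already >= 0)

Because variable maps are keyed by the *Node rather than by name, two distinct
variables that are both named x in different scopes get distinct symbols and
therefore distinct Addr values.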

Change-Id: Ibdccdaea4bbc63a1f4882959ac374f2b467e3acd
Reviewed-on: https://go-review.googlesource.com/11238
Reviewed-by: Josh Bleecher Snyder <josharian@gmail.com>
parent 37ddc270
@@ -28,6 +28,14 @@ Regalloc
 - Floating point registers
 - Make calls clobber all registers
+
+StackAlloc:
+- Compute size of outargs section correctly
+- Sort variables so all ptr-containing ones are first (so stack
+  maps are smaller)
+- Reuse stack slots for noninterfering and type-compatible variables
+  (both AUTOs and spilled Values). But see issue 8740 for what
+  "type-compatible variables" mean and what DWARF information provides.
 Rewrites
 - Strength reduction (both arch-indep and arch-dependent?)
 - Start another architecture (arm?)
...
@@ -104,6 +104,12 @@ func checkFunc(f *Func) {
 				f.Fatalf("phi length %s does not match pred length %d for block %s", v.LongString(), len(b.Preds), b)
 			}
+			if v.Op == OpAddr {
+				if v.Args[0].Op != OpSP && v.Args[0].Op != OpSB {
+					f.Fatalf("bad arg to OpAddr %v", v)
+				}
+			}
 			// TODO: check for cycles in values
 			// TODO: check type
 		}
...
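For instance (a sketch using the NewValue1 constructor seen in regalloc.go
below; the value names are hypothetical), the checker now rejects an address
built off anything other than the two base pointers:

    good := b.NewValue1(line, OpAddr, t, sp)         // ok: base is SP (or SB)
    bad := b.NewValue1(line, OpAddr, t, someGPValue) // fatal: "bad arg to OpAddr"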
@@ -56,7 +56,7 @@ func dse(f *Func) {
 				continue
 			}
 			if last != nil {
-				b.Fatalf("two final stores - simultaneous live stores", last, v)
+				b.Fatalf("two final stores - simultaneous live stores %s %s", last, v)
 			}
 			last = v
 		}
...
@@ -15,8 +15,8 @@ func TestDeadStore(t *testing.T) {
 		Bloc("entry",
 			Valu("start", OpArg, TypeMem, 0, ".mem"),
 			Valu("v", OpConst, TypeBool, 0, true),
-			Valu("addr1", OpGlobal, ptrType, 0, nil),
-			Valu("addr2", OpGlobal, ptrType, 0, nil),
+			Valu("addr1", OpAddr, ptrType, 0, nil),
+			Valu("addr2", OpAddr, ptrType, 0, nil),
 			Valu("store1", OpStore, TypeMem, 0, nil, "addr1", "v", "start"),
 			Valu("store2", OpStore, TypeMem, 0, nil, "addr2", "v", "store1"),
 			Valu("store3", OpStore, TypeMem, 0, nil, "addr1", "v", "store2"),
@@ -41,7 +41,7 @@ func TestDeadStorePhi(t *testing.T) {
 		Bloc("entry",
 			Valu("start", OpArg, TypeMem, 0, ".mem"),
 			Valu("v", OpConst, TypeBool, 0, true),
-			Valu("addr", OpGlobal, ptrType, 0, nil),
+			Valu("addr", OpAddr, ptrType, 0, nil),
 			Goto("loop")),
 		Bloc("loop",
 			Valu("phi", OpPhi, TypeMem, 0, nil, "start", "store"),
@@ -67,8 +67,8 @@ func TestDeadStoreTypes(t *testing.T) {
 		Bloc("entry",
 			Valu("start", OpArg, TypeMem, 0, ".mem"),
 			Valu("v", OpConst, TypeBool, 0, true),
-			Valu("addr1", OpGlobal, t1, 0, nil),
-			Valu("addr2", OpGlobal, t2, 0, nil),
+			Valu("addr1", OpAddr, t1, 0, nil),
+			Valu("addr2", OpAddr, t2, 0, nil),
 			Valu("store1", OpStore, TypeMem, 0, nil, "addr1", "v", "start"),
 			Valu("store2", OpStore, TypeMem, 0, nil, "addr2", "v", "store1"),
 			Goto("exit")),
...
@@ -68,6 +68,8 @@
 (Const <t> [val]) && t.IsInteger() -> (MOVQconst [val])
 
+(Addr {sym} base) -> (LEAQ {sym} base)
+
 // block rewrites
 (If (SETL cmp) yes no) -> (LT cmp yes no)
 (If (SETNE cmp) yes no) -> (NE cmp yes no)
@@ -80,9 +82,6 @@
 // Rules below here apply some simple optimizations after lowering.
 // TODO: Should this be a separate pass?
 
-// global loads/stores
-(Global {sym}) -> (LEAQglobal {sym})
-
 // fold constants into instructions
 (ADDQ x (MOVQconst [c])) -> (ADDQconst [c] x) // TODO: restrict c to int32 range?
 (ADDQ (MOVQconst [c]) x) -> (ADDQconst [c] x)
@@ -119,6 +118,11 @@
 (MOVQload [off1] (ADDQconst [off2] ptr) mem) -> (MOVQload [addOff(off1, off2)] ptr mem)
 (MOVQstore [off1] (ADDQconst [off2] ptr) val mem) -> (MOVQstore [addOff(off1, off2)] ptr val mem)
+(MOVQload [off1] {sym1} (LEAQ [off2] {sym2} base) mem) && (sym1 == nil || sym2 == nil) ->
+	(MOVQload [addOff(off1,off2)] {mergeSym(sym1,sym2)} base mem)
+(MOVQstore [off1] {sym1} (LEAQ [off2] {sym2} base) val mem) && (sym1 == nil || sym2 == nil) ->
+	(MOVQstore [addOff(off1,off2)] {mergeSym(sym1,sym2)} base val mem)
 
 // indexed loads and stores
 (MOVQload [off1] (LEAQ8 [off2] ptr idx) mem) -> (MOVQloadidx8 [addOff(off1, off2)] ptr idx mem)
 (MOVQstore [off1] (LEAQ8 [off2] ptr idx) val mem) -> (MOVQstoreidx8 [addOff(off1, off2)] ptr idx val mem)
...
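Worked through on a concrete load (illustrative SSA, tracing the rules above):
a read of a global g at offset 8 starts as a load through an Addr, lowers the
Addr to LEAQ, and then folds the LEAQ into the load's addressing mode:

    // (MOVQload [8] (Addr {g} SB) mem)
    // -> (MOVQload [8] (LEAQ {g} SB) mem)        // (Addr {sym} base) -> (LEAQ {sym} base)
    // -> (MOVQload [addOff(8,0)] {mergeSym(nil,g)} SB mem)
    //  = (MOVQload [8] {g} SB mem)               // a single MOVQ g+8(SB), reg

The sym1 == nil || sym2 == nil guard matters: at most one of the load and the
LEAQ may carry a symbol, since an x86 addressing mode has room for only one
static symbol, and mergeSym panics if handed two.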
@@ -42,7 +42,7 @@ var regNamesAMD64 = []string{
 	".X15",
 
 	// pseudo-registers
-	".FP",
+	".SB",
 	".FLAGS",
 }
@@ -71,19 +71,22 @@ func init() {
 	}
 	gp := buildReg("AX CX DX BX BP SI DI R8 R9 R10 R11 R12 R13 R14 R15")
-	gpsp := gp | buildReg("SP FP")
+	gpsp := gp | buildReg("SP")
+	gpspsb := gpsp | buildReg("SB")
 	flags := buildReg("FLAGS")
 	gp01 := regInfo{[]regMask{}, 0, []regMask{gp}}
 	gp11 := regInfo{[]regMask{gpsp}, 0, []regMask{gp}}
+	gp11sb := regInfo{[]regMask{gpspsb}, 0, []regMask{gp}}
 	gp21 := regInfo{[]regMask{gpsp, gpsp}, 0, []regMask{gp}}
+	gp21sb := regInfo{[]regMask{gpspsb, gpsp}, 0, []regMask{gp}}
 	gp21shift := regInfo{[]regMask{gpsp, buildReg("CX")}, 0, []regMask{gp}}
 	gp2flags := regInfo{[]regMask{gpsp, gpsp}, 0, []regMask{flags}}
 	gp1flags := regInfo{[]regMask{gpsp}, 0, []regMask{flags}}
 	flagsgp1 := regInfo{[]regMask{flags}, 0, []regMask{gp}}
-	gpload := regInfo{[]regMask{gpsp, 0}, 0, []regMask{gp}}
-	gploadidx := regInfo{[]regMask{gpsp, gpsp, 0}, 0, []regMask{gp}}
-	gpstore := regInfo{[]regMask{gpsp, gpsp, 0}, 0, nil}
-	gpstoreidx := regInfo{[]regMask{gpsp, gpsp, gpsp, 0}, 0, nil}
+	gpload := regInfo{[]regMask{gpspsb, 0}, 0, []regMask{gp}}
+	gploadidx := regInfo{[]regMask{gpspsb, gpsp, 0}, 0, []regMask{gp}}
+	gpstore := regInfo{[]regMask{gpspsb, gpsp, 0}, 0, nil}
+	gpstoreidx := regInfo{[]regMask{gpspsb, gpsp, gpsp, 0}, 0, nil}
 	flagsgp := regInfo{[]regMask{flags}, 0, []regMask{gp}}
 	cmov := regInfo{[]regMask{flags, gp, gp}, 0, []regMask{gp}}
@@ -129,12 +132,12 @@ func init() {
 	{name: "MOVWQSX", reg: gp11, asm: "MOVWQSX"}, // extend arg0 from int16 to int64
 	{name: "MOVBQSX", reg: gp11, asm: "MOVBQSX"}, // extend arg0 from int8 to int64
 	{name: "MOVQconst", reg: gp01}, // auxint
-	{name: "LEAQ", reg: gp21},       // arg0 + arg1 + auxint
-	{name: "LEAQ2", reg: gp21},      // arg0 + 2*arg1 + auxint
-	{name: "LEAQ4", reg: gp21},      // arg0 + 4*arg1 + auxint
-	{name: "LEAQ8", reg: gp21},      // arg0 + 8*arg1 + auxint
-	{name: "LEAQglobal", reg: gp01}, // no args. address of aux.(*gc.Sym)
+	{name: "LEAQ", reg: gp11sb},  // arg0 + auxint + offset encoded in aux
+	{name: "LEAQ1", reg: gp21sb}, // arg0 + arg1 + auxint
+	{name: "LEAQ2", reg: gp21sb}, // arg0 + 2*arg1 + auxint
+	{name: "LEAQ4", reg: gp21sb}, // arg0 + 4*arg1 + auxint
+	{name: "LEAQ8", reg: gp21sb}, // arg0 + 8*arg1 + auxint
 	{name: "MOVBload", reg: gpload, asm: "MOVB"}, // load byte from arg0+auxint. arg1=mem
 	{name: "MOVBQZXload", reg: gpload},           // ditto, extend to uint64
...
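Reading the regInfo literals (the field order, as used consistently in this
file, appears to be input masks, then a clobber mask, then output masks), the
change widens only the base-address input. A before/after sketch:

    // before: a load's base could be any gp register or SP
    gpload := regInfo{[]regMask{gpsp, 0}, 0, []regMask{gp}}
    // after: the base may also be SB, but results still come from plain gp,
    // so SB can appear in an addressing mode yet is never allocated as a result
    gpload := regInfo{[]regMask{gpspsb, 0}, 0, []regMask{gp}}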
@@ -40,7 +40,7 @@
 (Store dst (Load <t> src mem) mem) && t.Size() > 8 -> (Move [t.Size()] dst src mem)
 
 // string ops
-(Const <t> {s}) && t.IsString() -> (StringMake (OffPtr <TypeBytePtr> [2*config.ptrSize] (Global <TypeBytePtr> {config.fe.StringSym(s.(string))})) (Const <config.Uintptr> [int64(len(s.(string)))])) // TODO: ptr
+(Const <t> {s}) && t.IsString() -> (StringMake (OffPtr <TypeBytePtr> [2*config.ptrSize] (Addr <TypeBytePtr> {config.fe.StringSym(s.(string))} (SB <config.Uintptr>))) (Const <config.Uintptr> [int64(len(s.(string)))])) // TODO: ptr
 (Load <t> ptr mem) && t.IsString() -> (StringMake (Load <TypeBytePtr> ptr mem) (Load <config.Uintptr> (OffPtr <TypeBytePtr> [config.ptrSize] ptr) mem))
 (StringPtr (StringMake ptr _)) -> ptr
 (StringLen (StringMake _ len)) -> len
...
@@ -30,11 +30,17 @@ var genericOps = []opData{
 	{name: "Const"},
 
 	// Constant-like things
-	{name: "Arg"},    // address of a function parameter/result. Memory input is an arg called ".mem". aux is a string (TODO: make it something other than a string?)
-	{name: "Global"}, // the address of a global variable aux.(*gc.Sym)
-	{name: "SP"},     // stack pointer
-	{name: "FP"},     // frame pointer
-	{name: "Func"},   // entry address of a function
+	{name: "Arg"}, // memory input to the function.
+
+	// The address of a variable. arg0 is the base pointer (SB or SP, depending
+	// on whether it is a global or stack variable). The Aux field identifies the
+	// variable. It will be either an *ExternSymbol (with arg0=SB), *ArgSymbol (arg0=SP),
+	// or *AutoSymbol (arg0=SP).
+	{name: "Addr"}, // Address of a variable. Arg0=SP or SB. Aux identifies the variable.
+
+	{name: "SP"},   // stack pointer
+	{name: "SB"},   // static base pointer (a.k.a. globals pointer)
+	{name: "Func"}, // entry address of a function
 
 	// Memory operations
 	{name: "Load"}, // Load from arg0. arg1=memory
...
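In the package's test DSL (Fun/Bloc/Valu, as in the deadstore tests above), a
variable address under the new scheme looks roughly like this sketch (the
value names are illustrative, and a real aux would carry one of the symbol
types rather than nil):

    Valu("sp", OpSP, TypeUInt64, 0, nil),
    Valu("a", OpAddr, ptrType, 0, nil, "sp"), // &local; aux would be an *AutoSymbol
    Valu("store", OpStore, TypeMem, 0, nil, "a", "v", "start"),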
@@ -12,7 +12,7 @@ func lower(f *Func) {
 	// Check for unlowered opcodes, fail if we find one.
 	for _, b := range f.Blocks {
 		for _, v := range b.Values {
-			if opcodeTable[v.Op].generic && v.Op != OpFP && v.Op != OpSP && v.Op != OpArg && v.Op != OpCopy && v.Op != OpPhi {
+			if opcodeTable[v.Op].generic && v.Op != OpSP && v.Op != OpSB && v.Op != OpArg && v.Op != OpCopy && v.Op != OpPhi {
 				f.Unimplementedf("%s not lowered", v.LongString())
 			}
 		}
...
@@ -54,7 +54,7 @@ var registers = [...]Register{
 	Register{29, "X13"},
 	Register{30, "X14"},
 	Register{31, "X15"},
-	Register{32, "FP"}, // pseudo-register, actually a constant offset from SP
+	Register{32, "SB"}, // pseudo-register for global base pointer (aka %rip)
 	Register{33, "FLAGS"},
 	// TODO: make arch-dependent
@@ -101,15 +101,15 @@ func regalloc(f *Func) {
 	var oldSched []*Value
 
-	// Hack to find fp, sp Values and assign them a register. (TODO: make not so hacky)
-	var fp, sp *Value
+	// Hack to find sp and sb Values and assign them a register. (TODO: make not so hacky)
+	var sp, sb *Value
 	for _, v := range f.Entry.Values {
 		switch v.Op {
 		case OpSP:
 			sp = v
 			home = setloc(home, v, &registers[4]) // TODO: arch-dependent
-		case OpFP:
-			fp = v
+		case OpSB:
+			sb = v
 			home = setloc(home, v, &registers[32]) // TODO: arch-dependent
 		}
 	}
@@ -147,7 +147,7 @@ func regalloc(f *Func) {
 		// TODO: hack: initialize fixed registers
 		regs[4] = regInfo{sp, sp, false}
-		regs[32] = regInfo{fp, fp, false}
+		regs[32] = regInfo{sb, sb, false}
 		var used regMask  // has a 1 for each non-nil entry in regs
 		var dirty regMask // has a 1 for each dirty entry in regs
@@ -193,7 +193,7 @@ func regalloc(f *Func) {
 			// nospill contains registers that we can't spill because
 			// we already set them up for use by the current instruction.
 			var nospill regMask
-			nospill |= 0x100000010 // SP and FP can't be spilled (TODO: arch-specific)
+			nospill |= 0x100000010 // SP & SB can't be spilled (TODO: arch-specific)
 
 			// Move inputs into registers
 			for _, o := range order {
@@ -257,13 +257,15 @@ func regalloc(f *Func) {
 				var c *Value
 				if len(w.Args) == 0 {
 					// Materialize w
-					if w.Op == OpFP || w.Op == OpSP || w.Op == OpGlobal {
+					if w.Op == OpSB {
+						c = w
+					} else if w.Op == OpSP {
 						c = b.NewValue1(w.Line, OpCopy, w.Type, w)
 					} else {
 						c = b.NewValue0IA(w.Line, w.Op, w.Type, w.AuxInt, w.Aux)
 					}
-				} else if len(w.Args) == 1 && (w.Args[0].Op == OpFP || w.Args[0].Op == OpSP || w.Args[0].Op == OpGlobal) {
-					// Materialize offsets from SP/FP/Global
+				} else if len(w.Args) == 1 && (w.Args[0].Op == OpSP || w.Args[0].Op == OpSB) {
+					// Materialize offsets from SP/SB
					c = b.NewValue1IA(w.Line, w.Op, w.Type, w.AuxInt, w.Aux, w.Args[0])
 				} else if wreg != 0 {
 					// Copy from another register.
...
@@ -99,6 +99,17 @@ func addOff(x, y int64) int64 {
 	return z
 }
 
+func mergeSym(x, y interface{}) interface{} {
+	if x == nil {
+		return y
+	}
+	if y == nil {
+		return x
+	}
+	panic(fmt.Sprintf("mergeSym with two non-nil syms %s %s", x, y))
+	return nil
+}
+
 func inBounds(idx, len int64) bool {
 	return idx >= 0 && idx < len
 }
@@ -344,6 +344,24 @@ func rewriteValueAMD64(v *Value, config *Config) bool {
 		goto end858e823866524b81b4636f7dd7e8eefe
 	end858e823866524b81b4636f7dd7e8eefe:
 		;
+	case OpAddr:
+		// match: (Addr {sym} base)
+		// cond:
+		// result: (LEAQ {sym} base)
+		{
+			sym := v.Aux
+			base := v.Args[0]
+			v.Op = OpAMD64LEAQ
+			v.AuxInt = 0
+			v.Aux = nil
+			v.resetArgs()
+			v.Aux = sym
+			v.AddArg(base)
+			return true
+		}
+		goto end53cad0c3c9daa5575680e77c14e05e72
+	end53cad0c3c9daa5575680e77c14e05e72:
+		;
 	case OpAMD64CMOVQCC:
 		// match: (CMOVQCC (CMPQconst [c] (MOVQconst [d])) _ x)
 		// cond: inBounds(d, c)
@@ -501,22 +519,6 @@ func rewriteValueAMD64(v *Value, config *Config) bool {
 		goto endcc7894224d4f6b0bcabcece5d0185912
 	endcc7894224d4f6b0bcabcece5d0185912:
 		;
-	case OpGlobal:
-		// match: (Global {sym})
-		// cond:
-		// result: (LEAQglobal {sym})
-		{
-			sym := v.Aux
-			v.Op = OpAMD64LEAQglobal
-			v.AuxInt = 0
-			v.Aux = nil
-			v.resetArgs()
-			v.Aux = sym
-			return true
-		}
-		goto end8f47b6f351fecaeded45abbe5c2beec0
-	end8f47b6f351fecaeded45abbe5c2beec0:
-		;
 	case OpIsInBounds:
 		// match: (IsInBounds idx len)
 		// cond:
@@ -769,6 +771,35 @@ func rewriteValueAMD64(v *Value, config *Config) bool {
 		}
 		goto end843d29b538c4483b432b632e5666d6e3
 	end843d29b538c4483b432b632e5666d6e3:
+		;
+		// match: (MOVQload [off1] {sym1} (LEAQ [off2] {sym2} base) mem)
+		// cond: (sym1 == nil || sym2 == nil)
+		// result: (MOVQload [addOff(off1,off2)] {mergeSym(sym1,sym2)} base mem)
+		{
+			off1 := v.AuxInt
+			sym1 := v.Aux
+			if v.Args[0].Op != OpAMD64LEAQ {
+				goto end227426af95e74caddcf59fdcd30ca8bc
+			}
+			off2 := v.Args[0].AuxInt
+			sym2 := v.Args[0].Aux
+			base := v.Args[0].Args[0]
+			mem := v.Args[1]
+			if !(sym1 == nil || sym2 == nil) {
+				goto end227426af95e74caddcf59fdcd30ca8bc
+			}
+			v.Op = OpAMD64MOVQload
+			v.AuxInt = 0
+			v.Aux = nil
+			v.resetArgs()
+			v.AuxInt = addOff(off1, off2)
+			v.Aux = mergeSym(sym1, sym2)
+			v.AddArg(base)
+			v.AddArg(mem)
+			return true
+		}
+		goto end227426af95e74caddcf59fdcd30ca8bc
+	end227426af95e74caddcf59fdcd30ca8bc:
 		;
 		// match: (MOVQload [off1] (LEAQ8 [off2] ptr idx) mem)
 		// cond:
@@ -846,6 +877,37 @@ func rewriteValueAMD64(v *Value, config *Config) bool {
 		}
 		goto end2108c693a43c79aed10b9246c39c80aa
 	end2108c693a43c79aed10b9246c39c80aa:
+		;
+		// match: (MOVQstore [off1] {sym1} (LEAQ [off2] {sym2} base) val mem)
+		// cond: (sym1 == nil || sym2 == nil)
+		// result: (MOVQstore [addOff(off1,off2)] {mergeSym(sym1,sym2)} base val mem)
+		{
+			off1 := v.AuxInt
+			sym1 := v.Aux
+			if v.Args[0].Op != OpAMD64LEAQ {
+				goto end5061f48193268a5eb1e1740bdd23c43d
+			}
+			off2 := v.Args[0].AuxInt
+			sym2 := v.Args[0].Aux
+			base := v.Args[0].Args[0]
+			val := v.Args[1]
+			mem := v.Args[2]
+			if !(sym1 == nil || sym2 == nil) {
+				goto end5061f48193268a5eb1e1740bdd23c43d
+			}
+			v.Op = OpAMD64MOVQstore
+			v.AuxInt = 0
+			v.Aux = nil
+			v.resetArgs()
+			v.AuxInt = addOff(off1, off2)
+			v.Aux = mergeSym(sym1, sym2)
+			v.AddArg(base)
+			v.AddArg(val)
+			v.AddArg(mem)
+			return true
+		}
+		goto end5061f48193268a5eb1e1740bdd23c43d
+	end5061f48193268a5eb1e1740bdd23c43d:
 		;
 		// match: (MOVQstore [off1] (LEAQ8 [off2] ptr idx) val mem)
 		// cond:
...
@@ -60,12 +60,12 @@ func rewriteValuegeneric(v *Value, config *Config) bool {
 	case OpConst:
 		// match: (Const <t> {s})
 		// cond: t.IsString()
-		// result: (StringMake (OffPtr <TypeBytePtr> [2*config.ptrSize] (Global <TypeBytePtr> {config.fe.StringSym(s.(string))})) (Const <config.Uintptr> [int64(len(s.(string)))]))
+		// result: (StringMake (OffPtr <TypeBytePtr> [2*config.ptrSize] (Addr <TypeBytePtr> {config.fe.StringSym(s.(string))} (SB <config.Uintptr>))) (Const <config.Uintptr> [int64(len(s.(string)))]))
 		{
 			t := v.Type
 			s := v.Aux
 			if !(t.IsString()) {
-				goto end6d6321106a054a5984b2ed0acec52a5b
+				goto end55cd8fd3b98a2459d0ee9d6cbb456b01
 			}
 			v.Op = OpStringMake
 			v.AuxInt = 0
@@ -74,19 +74,22 @@ func rewriteValuegeneric(v *Value, config *Config) bool {
 			v0 := v.Block.NewValue0(v.Line, OpOffPtr, TypeInvalid)
 			v0.Type = TypeBytePtr
 			v0.AuxInt = 2 * config.ptrSize
-			v1 := v.Block.NewValue0(v.Line, OpGlobal, TypeInvalid)
+			v1 := v.Block.NewValue0(v.Line, OpAddr, TypeInvalid)
 			v1.Type = TypeBytePtr
 			v1.Aux = config.fe.StringSym(s.(string))
+			v2 := v.Block.NewValue0(v.Line, OpSB, TypeInvalid)
+			v2.Type = config.Uintptr
+			v1.AddArg(v2)
 			v0.AddArg(v1)
 			v.AddArg(v0)
-			v2 := v.Block.NewValue0(v.Line, OpConst, TypeInvalid)
-			v2.Type = config.Uintptr
-			v2.AuxInt = int64(len(s.(string)))
-			v.AddArg(v2)
+			v3 := v.Block.NewValue0(v.Line, OpConst, TypeInvalid)
+			v3.Type = config.Uintptr
+			v3.AuxInt = int64(len(s.(string)))
+			v.AddArg(v3)
 			return true
 		}
-		goto end6d6321106a054a5984b2ed0acec52a5b
-	end6d6321106a054a5984b2ed0acec52a5b:
+		goto end55cd8fd3b98a2459d0ee9d6cbb456b01
+	end55cd8fd3b98a2459d0ee9d6cbb456b01:
 		;
 	case OpIsInBounds:
 		// match: (IsInBounds (Const [c]) (Const [d]))
...
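The value this rewrite builds has the following shape (illustrative, for
s = "hi" on a target with 8-byte pointers):

    // (StringMake
    //     (OffPtr <*byte> [16] (Addr <*byte> {StringSym("hi")} (SB)))  // data pointer
    //     (Const <uintptr> [2]))                                       // length

The 2*config.ptrSize offset on the OffPtr suggests the symbol begins with a
two-word string header, with the bytes themselves following it; the TODO on
the rule hints this layout is still provisional.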
@@ -29,9 +29,9 @@ func makeConstShiftFunc(c *Config, amount int64, op Op, typ Type) fun {
 	fun := Fun(c, "entry",
 		Bloc("entry",
 			Valu("mem", OpArg, TypeMem, 0, ".mem"),
-			Valu("FP", OpFP, TypeUInt64, 0, nil),
-			Valu("argptr", OpOffPtr, ptyp, 8, nil, "FP"),
-			Valu("resptr", OpOffPtr, ptyp, 16, nil, "FP"),
+			Valu("SP", OpSP, TypeUInt64, 0, nil),
+			Valu("argptr", OpOffPtr, ptyp, 8, nil, "SP"),
+			Valu("resptr", OpOffPtr, ptyp, 16, nil, "SP"),
 			Valu("load", OpLoad, typ, 0, nil, "argptr", "mem"),
 			Valu("c", OpConst, TypeUInt64, amount, nil),
 			Valu("shift", op, typ, 0, nil, "load", "c"),
...
@@ -54,7 +54,7 @@ func stackalloc(f *Func) {
 				// v will have been materialized wherever it is needed.
 				continue
 			}
-			if len(v.Args) == 1 && (v.Args[0].Op == OpFP || v.Args[0].Op == OpSP || v.Args[0].Op == OpGlobal) {
+			if len(v.Args) == 1 && (v.Args[0].Op == OpSP || v.Args[0].Op == OpSB) {
 				continue
 			}
 			n = align(n, v.Type.Alignment())
@@ -64,54 +64,26 @@ func stackalloc(f *Func) {
 		}
 	}
 
+	// Finally, allocate space for all autos that we used
+	for _, b := range f.Blocks {
+		for _, v := range b.Values {
+			s, ok := v.Aux.(*AutoSymbol)
+			if !ok || s.Offset >= 0 {
+				continue
+			}
+			t := s.Typ
+			n = align(n, t.Alignment())
+			s.Offset = n
+			n += t.Size()
+		}
+	}
+
 	n = align(n, f.Config.ptrSize)
 	n += f.Config.ptrSize // space for return address. TODO: arch-dependent
 	f.RegAlloc = home
 	f.FrameSize = n
 
 	// TODO: share stack slots among noninterfering (& gc type compatible) values
-
-	// adjust all uses of FP to SP now that we have the frame size.
-	var fp *Value
-	for _, b := range f.Blocks {
-		for _, v := range b.Values {
-			if v.Op == OpFP {
-				if fp != nil {
-					b.Fatalf("multiple FP ops: %s %s", fp, v)
-				}
-				fp = v
-			}
-			for i, a := range v.Args {
-				if a.Op != OpFP {
-					continue
-				}
-				// TODO: do this with arch-specific rewrite rules somehow?
-				switch v.Op {
-				case OpAMD64ADDQ:
-					// (ADDQ (FP) x) -> (LEAQ [n] (SP) x)
-					v.Op = OpAMD64LEAQ
-					v.AuxInt = n
-				case OpAMD64ADDQconst:
-					// TODO(matloob): Add LEAQconst op
-					v.AuxInt = addOff(v.AuxInt, n)
-				case OpAMD64LEAQ, OpAMD64MOVQload, OpAMD64MOVQstore, OpAMD64MOVLload, OpAMD64MOVLstore, OpAMD64MOVWload, OpAMD64MOVWstore, OpAMD64MOVBload, OpAMD64MOVBstore, OpAMD64MOVQloadidx8:
-					if v.Op == OpAMD64MOVQloadidx8 && i == 1 {
-						// Note: we could do it, but it is probably an error
-						f.Fatalf("can't do FP->SP adjust on index slot of load %s", v.Op)
-					}
-					// eg: (MOVQload [c] (FP) mem) -> (MOVQload [c+n] (SP) mem)
-					v.AuxInt = addOff(v.AuxInt, n)
-				default:
-					f.Unimplementedf("can't do FP->SP adjust on %s", v.Op)
-					// TODO: OpCopy -> ADDQ
-				}
-			}
-		}
-	}
-	if fp != nil {
-		fp.Op = OpSP
-		home[fp.ID] = &registers[4] // TODO: arch-dependent
-	}
 }
 
 // align increases n to the next multiple of a. a must be a power of 2.
...
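A worked pass through the new sizing code (illustrative numbers, ptrSize = 8):
suppose spill slots have consumed n = 0 and two autos remain, an int64 a and a
bool b:

    // n = align(0, 8) = 0;  a.Offset = 0;  n += 8  -> n = 8
    // n = align(8, 1) = 8;  b.Offset = 8;  n += 1  -> n = 9
    // n = align(9, 8) = 16; n += 8 (return address) -> f.FrameSize = 24

The deleted FP-to-SP pass is no longer needed, presumably because offsets no
longer have to be patched into AuxInt once the frame size is known: an Addr's
aux symbol carries the variable's identity, so the final SP-relative offset
can be resolved wherever the symbol is lowered.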
@@ -114,3 +114,40 @@ func (v *Value) resetArgs() {
 func (v *Value) Logf(msg string, args ...interface{})   { v.Block.Logf(msg, args...) }
 func (v *Value) Fatalf(msg string, args ...interface{}) { v.Block.Fatalf(msg, args...) }
 func (v *Value) Unimplementedf(msg string, args ...interface{}) { v.Block.Unimplementedf(msg, args...) }
+
+// ExternSymbol is an aux value that encodes a variable's
+// constant offset from the static base pointer.
+type ExternSymbol struct {
+	Typ Type         // Go type
+	Sym fmt.Stringer // A *gc.Sym referring to a global variable
+	// Note: the offset for an external symbol is not
+	// calculated until link time.
+}
+
+// ArgSymbol is an aux value that encodes an argument or result
+// variable's constant offset from FP (FP = SP + framesize).
+type ArgSymbol struct {
+	Typ    Type         // Go type
+	Offset int64        // Distance above frame pointer
+	Sym    fmt.Stringer // A *gc.Sym referring to the argument/result variable.
+}
+
+// AutoSymbol is an aux value that encodes a local variable's
+// constant offset from SP.
+type AutoSymbol struct {
+	Typ    Type         // Go type
+	Offset int64        // Distance above stack pointer. Set by stackalloc in SSA.
+	Sym    fmt.Stringer // A *gc.Sym referring to a local (auto) variable.
+}
+
+func (s *ExternSymbol) String() string {
+	return s.Sym.String()
+}
+
+func (s *ArgSymbol) String() string {
+	return s.Sym.String()
+}
+
+func (s *AutoSymbol) String() string {
+	return s.Sym.String()
+}