Commit d1c15a0e authored by Keith Randall

[dev.ssa] cmd/compile/internal/ssa: implement ITAB

Implement ITAB, selecting the itable field of an interface.

Soften the lowering check to allow lowerings that leave
generic but dead ops behind.  (The ITAB lowering does this.)

Change-Id: Icc84961dd4060d143602f001311aa1d8be0d7fc0
Reviewed-on: https://go-review.googlesource.com/13144
Reviewed-by: Josh Bleecher Snyder <josharian@gmail.com>
parent 9495e45c
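
For readers less familiar with interface internals, here is a minimal, hedged illustration of what the new OITAB/OpITab node selects. It assumes the usual two-word interface representation (an itable pointer followed by a data pointer); the iface struct below is a local stand-in for that layout, not the runtime's own declaration.

package main

import (
	"fmt"
	"unsafe"
)

// iface mirrors the assumed two-word header of a non-empty interface value.
type iface struct {
	tab  unsafe.Pointer // itable pointer: the word ITab selects
	data unsafe.Pointer // pointer to the concrete value
}

func main() {
	var s fmt.Stringer
	// On a 64-bit platform both print 16: two words each.
	fmt.Println(unsafe.Sizeof(s), unsafe.Sizeof(iface{}))
}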
......
@@ -1247,6 +1247,10 @@ func (s *state) expr(n *Node) *ssa.Value {
return s.constInt(Types[TINT], n.Left.Type.Bound)
}
case OITAB:
a := s.expr(n.Left)
return s.newValue1(ssa.OpITab, n.Type, a)
case OCALLFUNC, OCALLMETH:
left := n.Left
static := left.Op == ONAME && left.Class == PFUNC
......
......
@@ -68,6 +68,7 @@ var passes = [...]pass{
{"lower", lower},
{"lowered cse", cse},
{"lowered deadcode", deadcode},
{"checkLower", checkLower},
{"critical", critical}, // remove critical edges
{"layout", layout}, // schedule blocks
{"schedule", schedule}, // schedule values
......
@@ -101,6 +102,9 @@ var passOrder = [...]constraint{
{"schedule", "regalloc"},
// stack allocation requires register allocation
{"regalloc", "stackalloc"},
// checkLower must run after lowering & subsequent dead code elim
{"lower", "checkLower"},
{"lowered deadcode", "checkLower"},
}
func init() {
......
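
The two new passOrder constraints encode that checkLower may only run once lowering and the following dead code elimination have finished. As a rough sketch (not the compiler's own scheduling code, which is elided here), an ordering constraint pair can be validated against the pass list like this:

package main

import "fmt"

// satisfies reports whether pass `before` appears earlier than pass `after`
// in the given pass name list; both must be present.
func satisfies(passNames []string, before, after string) bool {
	bi, ai := -1, -1
	for i, name := range passNames {
		switch name {
		case before:
			bi = i
		case after:
			ai = i
		}
	}
	return bi >= 0 && ai >= 0 && bi < ai
}

func main() {
	passes := []string{"lower", "lowered cse", "lowered deadcode", "checkLower", "critical"}
	fmt.Println(satisfies(passes, "lower", "checkLower"))            // true
	fmt.Println(satisfies(passes, "lowered deadcode", "checkLower")) // true
}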
......
@@ -228,6 +228,8 @@
(Addr {sym} base) -> (LEAQ {sym} base)
(ITab (Load ptr mem)) -> (MOVQload ptr mem)
// block rewrites
(If (SETL cmp) yes no) -> (LT cmp yes no)
(If (SETLE cmp) yes no) -> (LE cmp yes no)
......
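
The new (ITab (Load ptr mem)) -> (MOVQload ptr mem) rule captures the fact that, when an interface value sits in memory, the itable is simply the first 8-byte word at that address. A hedged source-level analogue, relying on the assumed two-word layout sketched above and meant for illustration only:

package main

import (
	"fmt"
	"unsafe"
)

type T struct{}

func (T) String() string { return "T" }

func main() {
	var s fmt.Stringer = T{}
	// Read the first word of the interface header: the moral equivalent of
	// the single MOVQload the rule produces. Implementation-dependent.
	itab := *(*uintptr)(unsafe.Pointer(&s))
	fmt.Printf("itab word: %#x\n", itab)
}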
......
@@ -244,6 +244,9 @@ var genericOps = []opData{
{name: "StringPtr"}, // ptr(arg0)
{name: "StringLen"}, // len(arg0)
// Interfaces
{name: "ITab"}, // arg0=interface, returns itable field
// Spill&restore ops for the register allocator. These are
// semantically identical to OpCopy; they do not take/return
// stores like regular memory ops do. We can get away without memory
......
......
@@ -8,8 +8,13 @@ package ssa
func lower(f *Func) {
// repeat rewrites until we find no more rewrites
applyRewrite(f, f.Config.lowerBlock, f.Config.lowerValue)
}
// Check for unlowered opcodes, fail if we find one.
// checkLower checks for unlowered opcodes and fails if we find one.
func checkLower(f *Func) {
// Needs to be a separate phase because it must run after both
// lowering and a subsequent dead code elimination (because lowering
// rules may leave dead generic ops behind).
for _, b := range f.Blocks {
for _, v := range b.Values {
if opcodeTable[v.Op].generic && v.Op != OpSP && v.Op != OpSB && v.Op != OpArg && v.Op != OpCopy && v.Op != OpPhi {
......
......
@@ -370,6 +370,7 @@ const (
OpStringMake
OpStringPtr
OpStringLen
OpITab
OpStoreReg
OpLoadReg
OpFwdRef
......
@@ -2773,6 +2774,10 @@ var opcodeTable = [...]opInfo{
name: "StringLen",
generic: true,
},
{
name: "ITab",
generic: true,
},
{
name: "StoreReg",
generic: true,
......
......
@@ -1972,6 +1972,27 @@ func rewriteValueAMD64(v *Value, config *Config) bool {
goto end22eaafbcfe70447f79d9b3e6cc395bbd
end22eaafbcfe70447f79d9b3e6cc395bbd:
;
case OpITab:
// match: (ITab (Load ptr mem))
// cond:
// result: (MOVQload ptr mem)
{
if v.Args[0].Op != OpLoad {
goto enda49fcae3630a097c78aa58189c90a97a
}
ptr := v.Args[0].Args[0]
mem := v.Args[0].Args[1]
v.Op = OpAMD64MOVQload
v.AuxInt = 0
v.Aux = nil
v.resetArgs()
v.AddArg(ptr)
v.AddArg(mem)
return true
}
goto enda49fcae3630a097c78aa58189c90a97a
enda49fcae3630a097c78aa58189c90a97a:
;
case OpIsInBounds:
// match: (IsInBounds idx len)
// cond:
......