Commit 7f27f1df authored by Cherry Zhang

cmd/compile: add MIPS64 optimizations, SSA on by default

Add the following optimizations:
- fold constants
- fold address into load/store
- simplify extensions and conditional branches
- remove nil checks

Turn on SSA by default for MIPS64, and toggle the tests' build tags accordingly.

Fixes #16359.

Change-Id: I7f1e38c2509e22e42cd024e712990ebbe47176bd
Reviewed-on: https://go-review.googlesource.com/27870
Run-TryBot: Cherry Zhang <cherryyz@google.com>
Reviewed-by: David Chase <drchase@google.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
parent 9f7ea616
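
For context, a hypothetical example of the "fold address into load/store" item (the pair type and function names below are illustrative, not from this commit):

	package main

	// pair and second are illustrative names, not from this commit.
	type pair struct{ a, b int64 }

	// With "fold address into load/store", the p.b access below can compile
	// on mips64 to a single MOVV load with auxint 8 (the field offset)
	// instead of an ADDVconst followed by a load from the computed address.
	func second(p *pair) int64 {
		return p.b
	}

	func main() {
		println(second(&pair{1, 2}))
	}
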
@@ -40,7 +40,7 @@ func shouldssa(fn *Node) bool {
if os.Getenv("SSATEST") == "" {
return false
}
case "amd64", "amd64p32", "arm", "386", "arm64", "ppc64le":
case "amd64", "amd64p32", "arm", "386", "arm64", "ppc64le", "mips64", "mips64le":
// Generally available.
}
if !ssaEnabled {
......
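
In effect, mips64 and mips64le join the "generally available" list, so SSA is on by default there while other architectures still require SSATEST=1. A minimal sketch of the gating (illustrative restatement, not code from this commit):

	package main

	import "os"

	// ssaOnByDefault is a simplified, illustrative restatement of the gate
	// in shouldssa above: SSA is on by default for the listed GOARCH values,
	// and elsewhere only when the SSATEST environment variable is set.
	func ssaOnByDefault(goarch string) bool {
		switch goarch {
		case "amd64", "amd64p32", "arm", "386", "arm64", "ppc64le", "mips64", "mips64le":
			return true
		}
		return os.Getenv("SSATEST") != ""
	}

	func main() {
		println(ssaOnByDefault("mips64")) // true after this change
	}
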
@@ -421,7 +421,32 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
ssa.OpMIPS64MOVHUreg,
ssa.OpMIPS64MOVWreg,
ssa.OpMIPS64MOVWUreg:
-// TODO: remove extension if after proper load
+a := v.Args[0]
+for a.Op == ssa.OpCopy || a.Op == ssa.OpMIPS64MOVVreg {
+	a = a.Args[0]
+}
+if a.Op == ssa.OpLoadReg {
+	t := a.Type
+	switch {
+	case v.Op == ssa.OpMIPS64MOVBreg && t.Size() == 1 && t.IsSigned(),
+		v.Op == ssa.OpMIPS64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
+		v.Op == ssa.OpMIPS64MOVHreg && t.Size() == 2 && t.IsSigned(),
+		v.Op == ssa.OpMIPS64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
+		v.Op == ssa.OpMIPS64MOVWreg && t.Size() == 4 && t.IsSigned(),
+		v.Op == ssa.OpMIPS64MOVWUreg && t.Size() == 4 && !t.IsSigned():
+		// arg is a proper-typed load, already zero/sign-extended, don't extend again
+		if gc.SSARegNum(v) == gc.SSARegNum(v.Args[0]) {
+			return
+		}
+		p := gc.Prog(mips.AMOVV)
+		p.From.Type = obj.TYPE_REG
+		p.From.Reg = gc.SSARegNum(v.Args[0])
+		p.To.Type = obj.TYPE_REG
+		p.To.Reg = gc.SSARegNum(v)
+		return
+	default:
+	}
+}
fallthrough
case ssa.OpMIPS64MOVWF,
ssa.OpMIPS64MOVWD,
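
The added code above drops a redundant extension when the operand is an OpLoadReg reload whose type already implies the right zero/sign-extension, since MIPS64 typed loads extend to 64 bits on their own. A hypothetical source-level illustration (assumed example, not from this commit):

	package main

	//go:noinline
	func use(x int64) {}

	// Illustrative only: if x is spilled around the call and reloaded with a
	// typed, sign-extending MOVB load, the MOVBreg emitted for the int64
	// conversion below is redundant, and the check above now elides it.
	func f(x int8) int64 {
		use(0)          // may force x to be spilled and reloaded
		return int64(x)
	}

	func main() {
		use(f(-1))
	}
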
@@ -613,7 +638,64 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
gc.Maxarg = v.AuxInt
}
case ssa.OpMIPS64LoweredNilCheck:
-// TODO: optimization
+// Optimization - if the subsequent block has a load or store
+// at the same address, we don't need to issue this instruction.
+mem := v.Args[1]
+for _, w := range v.Block.Succs[0].Block().Values {
+	if w.Op == ssa.OpPhi {
+		if w.Type.IsMemory() {
+			mem = w
+		}
+		continue
+	}
+	if len(w.Args) == 0 || !w.Args[len(w.Args)-1].Type.IsMemory() {
+		// w doesn't use a store - can't be a memory op.
+		continue
+	}
+	if w.Args[len(w.Args)-1] != mem {
+		v.Fatalf("wrong store after nilcheck v=%s w=%s", v, w)
+	}
+	switch w.Op {
+	case ssa.OpMIPS64MOVBload, ssa.OpMIPS64MOVBUload, ssa.OpMIPS64MOVHload, ssa.OpMIPS64MOVHUload,
+		ssa.OpMIPS64MOVWload, ssa.OpMIPS64MOVWUload, ssa.OpMIPS64MOVVload,
+		ssa.OpMIPS64MOVFload, ssa.OpMIPS64MOVDload,
+		ssa.OpMIPS64MOVBstore, ssa.OpMIPS64MOVHstore, ssa.OpMIPS64MOVWstore, ssa.OpMIPS64MOVVstore,
+		ssa.OpMIPS64MOVFstore, ssa.OpMIPS64MOVDstore:
+		// arg0 is ptr, auxint is offset
+		if w.Args[0] == v.Args[0] && w.Aux == nil && w.AuxInt >= 0 && w.AuxInt < minZeroPage {
+			if gc.Debug_checknil != 0 && int(v.Line) > 1 {
+				gc.Warnl(v.Line, "removed nil check")
+			}
+			return
+		}
+	case ssa.OpMIPS64DUFFZERO, ssa.OpMIPS64LoweredZero:
+		// arg0 is ptr
+		if w.Args[0] == v.Args[0] {
+			if gc.Debug_checknil != 0 && int(v.Line) > 1 {
+				gc.Warnl(v.Line, "removed nil check")
+			}
+			return
+		}
+	case ssa.OpMIPS64LoweredMove:
+		// arg0 is dst ptr, arg1 is src ptr
+		if w.Args[0] == v.Args[0] || w.Args[1] == v.Args[0] {
+			if gc.Debug_checknil != 0 && int(v.Line) > 1 {
+				gc.Warnl(v.Line, "removed nil check")
+			}
+			return
+		}
+	default:
+	}
+	if w.Type.IsMemory() {
+		if w.Op == ssa.OpVarDef || w.Op == ssa.OpVarKill || w.Op == ssa.OpVarLive {
+			// these ops are OK
+			mem = w
+			continue
+		}
+		// We can't delay the nil check past the next store.
+		break
+	}
+}
// Issue a load which will fault if arg is nil.
p := gc.Prog(mips.AMOVB)
p.From.Type = obj.TYPE_MEM
......
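
The nil-check lowering now scans the successor block for a memory op at the same address before falling back to the faulting MOVB load. A hypothetical example of code whose explicit nil check this removes (not from this commit):

	package main

	type T struct{ a, b int64 }

	// Illustrative only: the nil check scheduled for p can be removed because
	// the load of p.a (offset 0, below minZeroPage) in the following block
	// faults anyway if p is nil, making the explicit check redundant.
	func load(p *T) int64 {
		return p.a
	}

	func main() {
		println(load(&T{1, 2}))
	}
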
@@ -201,9 +201,9 @@ func init() {
// comparisons
{name: "SGT", argLength: 2, reg: gp21, asm: "SGT", typ: "Bool"}, // 1 if arg0 > arg1 (signed), 0 otherwise
{name: "SGTconst", argLength: 2, reg: gp21, asm: "SGT", aux: "Int64", typ: "Bool"}, // 1 if arg0 > auxInt (signed), 0 otherwise
{name: "SGTconst", argLength: 1, reg: gp11, asm: "SGT", aux: "Int64", typ: "Bool"}, // 1 if auxInt > arg0 (signed), 0 otherwise
{name: "SGTU", argLength: 2, reg: gp21, asm: "SGTU", typ: "Bool"}, // 1 if arg0 > arg1 (unsigned), 0 otherwise
{name: "SGTUconst", argLength: 2, reg: gp21, asm: "SGTU", aux: "Int64", typ: "Bool"}, // 1 if arg0 > auxInt (unsigned), 0 otherwise
{name: "SGTUconst", argLength: 1, reg: gp11, asm: "SGTU", aux: "Int64", typ: "Bool"}, // 1 if auxInt > arg0 (unsigned), 0 otherwise
{name: "CMPEQF", argLength: 2, reg: fp2flags, asm: "CMPEQF", typ: "Flags"}, // flags=true if arg0 = arg1, float32
{name: "CMPEQD", argLength: 2, reg: fp2flags, asm: "CMPEQD", typ: "Flags"}, // flags=true if arg0 = arg1, float64
......
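
SGTconst and SGTUconst thus become one-input ops that compare auxInt against arg0; presumably the accompanying rewrite rules (not shown in this excerpt) fold a constant first operand of SGT/SGTU into these forms. A hypothetical source-level illustration (assumed, not from this excerpt):

	package main

	// Illustrative only: x < 5 compares with the constant as the left
	// operand (1 if 5 > x, i.e. SGT with a constant first argument); the
	// one-input SGTconst form carries the 5 in auxInt instead of
	// materializing it in a register.
	func lt5(x int64) bool {
		return x < 5
	}

	func main() {
		println(lt5(3), lt5(7))
	}
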
@@ -12421,12 +12421,11 @@ var opcodeTable = [...]opInfo{
{
name: "SGTconst",
auxType: auxInt64,
-argLen: 2,
+argLen: 1,
asm: mips.ASGT,
reg: regInfo{
inputs: []inputInfo{
{0, 100663294}, // R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R18 R19 R20 R21 R22 R24 R25 g
-{1, 100663294}, // R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R18 R19 R20 R21 R22 R24 R25 g
},
outputs: []outputInfo{
{0, 33554430}, // R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R18 R19 R20 R21 R22 R24 R25
@@ -12450,12 +12449,11 @@ var opcodeTable = [...]opInfo{
{
name: "SGTUconst",
auxType: auxInt64,
-argLen: 2,
+argLen: 1,
asm: mips.ASGTU,
reg: regInfo{
inputs: []inputInfo{
{0, 100663294}, // R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R18 R19 R20 R21 R22 R24 R25 g
-{1, 100663294}, // R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R18 R19 R20 R21 R22 R24 R25 g
},
outputs: []outputInfo{
{0, 33554430}, // R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 R16 R17 R18 R19 R20 R21 R22 R24 R25
......
-// +build !amd64,!arm,!amd64p32,!386,!arm64,!ppc64le
+// +build !amd64,!arm,!amd64p32,!386,!arm64,!ppc64le,!mips64,!mips64le
// errorcheck -0 -l -live -wb=0
// Copyright 2014 The Go Authors. All rights reserved.
......
-// +build amd64 arm amd64p32 386 arm64
+// +build amd64 arm amd64p32 386 arm64 mips64 mips64le
// errorcheck -0 -l -live -wb=0
// Copyright 2014 The Go Authors. All rights reserved.
......
// errorcheck -0 -d=nil
-// +build amd64 arm amd64p32 386 arm64
+// +build amd64 arm amd64p32 386 arm64 mips64 mips64le
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
......
-// +build !amd64,!arm,!amd64p32,!386,!arm64,!ppc64le
+// +build !amd64,!arm,!amd64p32,!386,!arm64,!ppc64le,!mips64,!mips64le
// errorcheck -0 -d=append,slice
// Copyright 2015 The Go Authors. All rights reserved.
......