Commit 379d2dea authored by Martin Möhrmann

cmd/compile: remove superfluous signed right shift used for signed division by 2

A signed right shift before an unsigned right shift by register width-1
(extracts the sign bit) is superfluous.

trigger counts during ./make.bash
 0   (Rsh8U  (Rsh8  x _) 7  ) -> (Rsh8U  x 7 )
 0   (Rsh16U (Rsh16 x _) 15 ) -> (Rsh16U x 15)
 2   (Rsh32U (Rsh32 x _) 31 ) -> (Rsh32U x 31)
 251 (Rsh64U (Rsh64 x _) 63 ) -> (Rsh64U x 63)

Changes the instructions generated on AMD64 for x / 2 where
x is a signed integer from:

 MOVQ    AX, CX
 SARQ    $63, AX
 SHRQ    $63, AX
 ADDQ    CX, AX
 SARQ    $1, AX

to:

 MOVQ    AX, CX
 SHRQ    $63, AX
 ADDQ    CX, AX
 SARQ    $1, AX

Change-Id: I86321ae8fc9dc24b8fa9eb80aa5c7299eff8c9dc
Reviewed-on: https://go-review.googlesource.com/115956
Run-TryBot: Martin Möhrmann <moehrmann@google.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Keith Randall <khr@golang.org>
parent 84374d4d
......@@ -371,6 +371,12 @@
(Rsh16Ux64 <t> (Rsh16Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh16Ux64 x (Const64 <t> [c+d]))
(Rsh8Ux64 <t> (Rsh8Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh8Ux64 x (Const64 <t> [c+d]))
// Remove signed right shift before an unsigned right shift that extracts the sign bit.
(Rsh8Ux64 (Rsh8x64 x _) (Const64 <t> [7] )) -> (Rsh8Ux64 x (Const64 <t> [7] ))
(Rsh16Ux64 (Rsh16x64 x _) (Const64 <t> [15])) -> (Rsh16Ux64 x (Const64 <t> [15]))
(Rsh32Ux64 (Rsh32x64 x _) (Const64 <t> [31])) -> (Rsh32Ux64 x (Const64 <t> [31]))
(Rsh64Ux64 (Rsh64x64 x _) (Const64 <t> [63])) -> (Rsh64Ux64 x (Const64 <t> [63]))
// ((x >> c1) << c2) >> c3
(Rsh(64|32|16|8)Ux64 (Lsh(64|32|16|8)x64 (Rsh(64|32|16|8)Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
&& uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
......
......@@ -24905,6 +24905,32 @@ func rewriteValuegeneric_OpRsh16Ux64_0(v *Value) bool {
v.AddArg(v0)
return true
}
// match: (Rsh16Ux64 (Rsh16x64 x _) (Const64 <t> [15]))
// cond:
// result: (Rsh16Ux64 x (Const64 <t> [15]))
// An unsigned shift right by 15 leaves only the sign bit of a 16-bit
// value, and a preceding arithmetic (sign-extending) right shift cannot
// change that bit, so the inner signed shift is dropped.
// NOTE(review): this file appears to be machine-generated from the
// generic rewrite rules — change the .rules source, not this file.
for {
_ = v.Args[1]
v_0 := v.Args[0]
if v_0.Op != OpRsh16x64 {
break
}
_ = v_0.Args[1]
x := v_0.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpConst64 {
break
}
t := v_1.Type
if v_1.AuxInt != 15 {
break
}
v.reset(OpRsh16Ux64)
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpConst64, t)
v0.AuxInt = 15
v.AddArg(v0)
return true
}
// match: (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
// result: (Rsh16Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
......@@ -25449,6 +25475,32 @@ func rewriteValuegeneric_OpRsh32Ux64_0(v *Value) bool {
v.AddArg(v0)
return true
}
// match: (Rsh32Ux64 (Rsh32x64 x _) (Const64 <t> [31]))
// cond:
// result: (Rsh32Ux64 x (Const64 <t> [31]))
// An unsigned shift right by 31 leaves only the sign bit of a 32-bit
// value, and a preceding arithmetic (sign-extending) right shift cannot
// change that bit, so the inner signed shift is dropped.
// NOTE(review): this file appears to be machine-generated from the
// generic rewrite rules — change the .rules source, not this file.
for {
_ = v.Args[1]
v_0 := v.Args[0]
if v_0.Op != OpRsh32x64 {
break
}
_ = v_0.Args[1]
x := v_0.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpConst64 {
break
}
t := v_1.Type
if v_1.AuxInt != 31 {
break
}
v.reset(OpRsh32Ux64)
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpConst64, t)
v0.AuxInt = 31
v.AddArg(v0)
return true
}
// match: (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
// result: (Rsh32Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
......@@ -26055,6 +26107,32 @@ func rewriteValuegeneric_OpRsh64Ux64_0(v *Value) bool {
v.AddArg(v0)
return true
}
// match: (Rsh64Ux64 (Rsh64x64 x _) (Const64 <t> [63]))
// cond:
// result: (Rsh64Ux64 x (Const64 <t> [63]))
// An unsigned shift right by 63 leaves only the sign bit of a 64-bit
// value, and a preceding arithmetic (sign-extending) right shift cannot
// change that bit, so the inner signed shift is dropped. Per the commit
// message this is the common case (signed division by 2), removing one
// SARQ on AMD64.
// NOTE(review): this file appears to be machine-generated from the
// generic rewrite rules — change the .rules source, not this file.
for {
_ = v.Args[1]
v_0 := v.Args[0]
if v_0.Op != OpRsh64x64 {
break
}
_ = v_0.Args[1]
x := v_0.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpConst64 {
break
}
t := v_1.Type
if v_1.AuxInt != 63 {
break
}
v.reset(OpRsh64Ux64)
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpConst64, t)
v0.AuxInt = 63
v.AddArg(v0)
return true
}
// match: (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
// result: (Rsh64Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
......@@ -26723,6 +26801,32 @@ func rewriteValuegeneric_OpRsh8Ux64_0(v *Value) bool {
v.AddArg(v0)
return true
}
// match: (Rsh8Ux64 (Rsh8x64 x _) (Const64 <t> [7]))
// cond:
// result: (Rsh8Ux64 x (Const64 <t> [7] ))
// An unsigned shift right by 7 leaves only the sign bit of an 8-bit
// value, and a preceding arithmetic (sign-extending) right shift cannot
// change that bit, so the inner signed shift is dropped.
// NOTE(review): this file appears to be machine-generated from the
// generic rewrite rules — change the .rules source, not this file.
for {
_ = v.Args[1]
v_0 := v.Args[0]
if v_0.Op != OpRsh8x64 {
break
}
_ = v_0.Args[1]
x := v_0.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpConst64 {
break
}
t := v_1.Type
if v_1.AuxInt != 7 {
break
}
v.reset(OpRsh8Ux64)
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpConst64, t)
v0.AuxInt = 7
v.AddArg(v0)
return true
}
// match: (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
// result: (Rsh8Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment