Commit 01b4ddb3 authored by Cherry Zhang

runtime/internal/atomic: crash on unaligned 64-bit ops on 386 and ARM

Updates #17786. Will fix mips(32) when the port is fully landed.

Change-Id: I00d4ff666ec14a38cadbcd52569b347bb5bc8b75
Reviewed-on: https://go-review.googlesource.com/33236
Run-TryBot: Cherry Zhang <cherryyz@google.com>
Reviewed-by: Ian Lance Taylor <iant@golang.org>
Reviewed-by: Brad Fitzpatrick <bradfitz@golang.org>
parent bbe96f56
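
For context (not part of the commit itself): the failure being targeted is a 64-bit atomic operation on an address that is only 4-byte aligned. This is easy to hit on 386 and ARM because the compiler guarantees only 4-byte alignment for uint64 fields inside structs on 32-bit platforms. A minimal illustration of how such an address can arise, using the public sync/atomic package (which documents the same 8-byte alignment requirement); whether it actually faults depends on where the struct lands in memory:

// Illustration only, not from the commit. On 386/ARM a uint64 field
// that follows a uint32 may end up 4-byte- but not 8-byte-aligned,
// and 64-bit atomics on it then crash instead of failing silently.
package main

import (
	"fmt"
	"sync/atomic"
	"unsafe"
)

type counters struct {
	flag uint32
	n    uint64 // offset 4 on 32-bit platforms; 8-byte alignment not guaranteed
}

func main() {
	c := new(counters)
	fmt.Println("offset of n:", unsafe.Offsetof(c.n))
	atomic.AddUint64(&c.n, 1) // may fault on 386/ARM if &c.n is not 8-byte aligned
}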
@@ -52,6 +52,9 @@ TEXT runtime∕internal∕atomic·Xaddint64(SB), NOSPLIT, $0-20
 // }
 TEXT runtime∕internal∕atomic·Cas64(SB), NOSPLIT, $0-21
 	MOVL	ptr+0(FP), BP
+	TESTL	$7, BP
+	JZ	2(PC)
+	MOVL	0, BP // crash with nil ptr deref
 	MOVL	old_lo+4(FP), AX
 	MOVL	old_hi+8(FP), DX
 	MOVL	new_lo+12(FP), BX
......
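
The three added instructions are the entire check: TESTL $7, BP sets the zero flag only when the low three bits of the pointer are clear (i.e. the address is 8-byte aligned), JZ 2(PC) then skips the next instruction, and otherwise MOVL 0, BP loads from address 0 so the operation faults at once instead of silently misbehaving. A rough Go-level rendering of that predicate (illustrative only; aligned8 is a hypothetical name, not part of the patch):

package atomicdemo

import "unsafe"

// aligned8 mirrors the assembly's TESTL $7 check: it reports whether
// the pointer's low three bits are zero, i.e. 8-byte aligned. The
// patched assembly faults (via the load of address 0) whenever this
// would return false.
func aligned8(p unsafe.Pointer) bool {
	return uintptr(p)&7 == 0
}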
@@ -106,6 +106,9 @@ func Store(addr *uint32, v uint32) {
 //go:nosplit
 func Cas64(addr *uint64, old, new uint64) bool {
+	if uintptr(unsafe.Pointer(addr))&7 != 0 {
+		*(*int)(nil) = 0 // crash on unaligned uint64
+	}
 	var ok bool
 	addrLock(addr).lock()
 	if *addr == old {
@@ -118,6 +121,9 @@ func Cas64(addr *uint64, old, new uint64) bool {
 //go:nosplit
 func Xadd64(addr *uint64, delta int64) uint64 {
+	if uintptr(unsafe.Pointer(addr))&7 != 0 {
+		*(*int)(nil) = 0 // crash on unaligned uint64
+	}
 	var r uint64
 	addrLock(addr).lock()
 	r = *addr + uint64(delta)
@@ -128,6 +134,9 @@ func Xadd64(addr *uint64, delta int64) uint64 {
 //go:nosplit
 func Xchg64(addr *uint64, v uint64) uint64 {
+	if uintptr(unsafe.Pointer(addr))&7 != 0 {
+		*(*int)(nil) = 0 // crash on unaligned uint64
+	}
 	var r uint64
 	addrLock(addr).lock()
 	r = *addr
@@ -138,6 +147,9 @@ func Xchg64(addr *uint64, v uint64) uint64 {
 //go:nosplit
 func Load64(addr *uint64) uint64 {
+	if uintptr(unsafe.Pointer(addr))&7 != 0 {
+		*(*int)(nil) = 0 // crash on unaligned uint64
+	}
 	var r uint64
 	addrLock(addr).lock()
 	r = *addr
@@ -147,6 +159,9 @@ func Load64(addr *uint64) uint64 {
 //go:nosplit
 func Store64(addr *uint64, v uint64) {
+	if uintptr(unsafe.Pointer(addr))&7 != 0 {
+		*(*int)(nil) = 0 // crash on unaligned uint64
+	}
 	addrLock(addr).lock()
 	*addr = v
 	addrLock(addr).unlock()
......
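
These Go-implemented 64-bit atomics serialize each operation through a lock chosen from the address (addrLock), and the new guard writes through a nil pointer so that an unaligned address produces the same immediate nil-pointer fault as the assembly versions. A hedged sketch of the overall pattern in plain Go, not the runtime's actual code (locktab, addrLock, and cas64 below are illustrative stand-ins; the runtime uses its own spinlock table rather than sync.Mutex):

package atomicdemo

import (
	"sync"
	"unsafe"
)

// locktab is a small table of locks; addrLock hashes an address to a
// slot so operations on the same uint64 always serialize on the same lock.
var locktab [64]sync.Mutex

func addrLock(addr *uint64) *sync.Mutex {
	return &locktab[(uintptr(unsafe.Pointer(addr))>>3)%uintptr(len(locktab))]
}

// cas64 is a software compare-and-swap in the shape of the Cas64 above.
func cas64(addr *uint64, old, new uint64) bool {
	if uintptr(unsafe.Pointer(addr))&7 != 0 {
		*(*int)(nil) = 0 // crash on unaligned uint64, as in the patch
	}
	l := addrLock(addr)
	l.Lock()
	ok := *addr == old
	if ok {
		*addr = new
	}
	l.Unlock()
	return ok
}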
@@ -87,8 +87,6 @@ func TestUnaligned64(t *testing.T) {
 		if unsafe.Sizeof(int(0)) != 4 {
 			t.Skip("test only runs on 32-bit systems")
 		}
-	case "arm":
-		t.Skipf("TODO: implement. golang.org/issue/17786")
 	case "amd64p32", "mips", "mipsle":
 		// amd64p32 and mips can handle unaligned atomics.
 		t.Skipf("test not needed on %v", runtime.GOARCH)
@@ -101,4 +99,7 @@ func TestUnaligned64(t *testing.T) {
 	shouldPanic(t, "Load64", func() { atomic.Load64(up64) })
 	shouldPanic(t, "Loadint64", func() { atomic.Loadint64(p64) })
 	shouldPanic(t, "Store64", func() { atomic.Store64(up64, 0) })
+	shouldPanic(t, "Xadd64", func() { atomic.Xadd64(up64, 1) })
+	shouldPanic(t, "Xchg64", func() { atomic.Xchg64(up64, 1) })
+	shouldPanic(t, "Cas64", func() { atomic.Cas64(up64, 1, 2) })
 }
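
The test change removes the arm skip now that ARM crashes on unaligned 64-bit atomics too, and extends the must-panic list to cover Xadd64, Xchg64, and Cas64 alongside the load and store entry points. shouldPanic is a helper defined elsewhere in the test file; a hedged sketch of a recover-based helper of that shape (the real one may differ in detail):

package atomic_test

import "testing"

// shouldPanic runs f and fails the test if it does not panic; the
// unaligned-pointer fault surfaces as a runtime panic that recover
// can observe.
func shouldPanic(t *testing.T, name string, f func()) {
	defer func() {
		if recover() == nil {
			t.Errorf("%s did not panic", name)
		}
	}()
	f()
}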