Commit cf0d8c09, authored Sep 08, 2011 by Dmitriy Vyukov
sync/atomic: add 64-bit Load and Store
R=rsc CC=golang-dev
https://golang.org/cl/4977054
parent 571d3f50
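For orientation, a minimal sketch (not part of the commit) of how the new 64-bit primitives are used; the function names match the declarations added to doc.go below, and the alignment note is an assumption about 32-bit targets:

	package main

	import (
		"fmt"
		"runtime"
		"sync/atomic"
	)

	func main() {
		var counter int64 // assumed 64-bit aligned, which matters on 32-bit targets
		done := make(chan bool, 2)
		go func() {
			for i := int64(1); i <= 1000; i++ {
				atomic.StoreInt64(&counter, i) // atomic 64-bit store
			}
			done <- true
		}()
		go func() {
			for atomic.LoadInt64(&counter) < 1000 { // atomic 64-bit load, never torn
				runtime.Gosched()
			}
			done <- true
		}()
		<-done
		<-done
		fmt.Println(atomic.LoadInt64(&counter)) // prints 1000
	}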
Showing 6 changed files with 362 additions and 13 deletions
src/pkg/sync/atomic/asm_386.s        +28  -0
src/pkg/sync/atomic/asm_amd64.s      +18  -0
src/pkg/sync/atomic/asm_arm.s        +24  -0
src/pkg/sync/atomic/asm_linux_arm.s  +12  -0
src/pkg/sync/atomic/atomic_test.go   +267 -12
src/pkg/sync/atomic/doc.go           +13  -1
src/pkg/sync/atomic/asm_386.s

@@ -98,6 +98,19 @@ TEXT ·LoadUint32(SB),7,$0
 	MOVL AX, ret+4(FP)
 	RET
 
+TEXT ·LoadInt64(SB),7,$0
+	JMP	·LoadUint64(SB)
+
+TEXT ·LoadUint64(SB),7,$0
+	MOVL	addrptr+0(FP), AX
+	// MOVQ (%EAX), %MM0
+	BYTE $0x0f; BYTE $0x6f; BYTE $0x00
+	// MOVQ %MM0, 0x8(%ESP)
+	BYTE $0x0f; BYTE $0x7f; BYTE $0x44; BYTE $0x24; BYTE $0x08
+	// EMMS
+	BYTE $0x0F; BYTE $0x77
+	RET
+
 TEXT ·LoadUintptr(SB),7,$0
 	JMP	·LoadUint32(SB)
...
@@ -113,6 +126,21 @@ TEXT ·StoreUint32(SB),7,$0
 	XCHGL AX, 0(BP)
 	RET
 
+TEXT ·StoreInt64(SB),7,$0
+	JMP	·StoreUint64(SB)
+
+TEXT ·StoreUint64(SB),7,$0
+	MOVL	addrptr+0(FP), AX
+	// MOVQ 0x8(%ESP), %MM0
+	BYTE $0x0f; BYTE $0x6f; BYTE $0x44; BYTE $0x24; BYTE $0x08
+	// MOVQ %MM0, (%EAX)
+	BYTE $0x0f; BYTE $0x7f; BYTE $0x00
+	// EMMS
+	BYTE $0x0F; BYTE $0x77
+	// MFENCE
+	BYTE $0x0f; BYTE $0xae; BYTE $0xf0
+	RET
+
 TEXT ·StoreUintptr(SB),7,$0
 	JMP	·StoreUint32(SB)
...
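The BYTE sequences hand-assemble MMX instructions, since the assembler of this era had no MMX mnemonics: an 8-byte MOVQ through %MM0 is a single memory access on Pentium-MMX-and-later, so the 64-bit load and store cannot be torn, and the trailing MFENCE gives the MMX store the ordering that the XCHGL-based 32-bit stores get implicitly. A small probe in the spirit of the hammer tests added below (illustrative only, not part of the commit) shows what "torn" means: the writer keeps both 32-bit halves equal, so any mismatch the reader sees could only come from a non-atomic 64-bit access:

	package main

	import (
		"fmt"
		"sync/atomic"
	)

	func main() {
		var v uint64 // the writer keeps the low and high 32-bit halves equal
		go func() {
			for {
				x := atomic.LoadUint64(&v)
				atomic.StoreUint64(&v, x+1+1<<32) // bump both halves together
			}
		}()
		for i := 0; i < 1000000; i++ {
			x := atomic.LoadUint64(&v)
			if lo, hi := x&(1<<32-1), x>>32; lo != hi {
				fmt.Printf("torn read: %#x != %#x\n", lo, hi)
				return
			}
		}
		fmt.Println("no torn reads observed")
	}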
src/pkg/sync/atomic/asm_amd64.s

@@ -70,6 +70,15 @@ TEXT ·LoadUint32(SB),7,$0
 	MOVL AX, ret+8(FP)
 	RET
 
+TEXT ·LoadInt64(SB),7,$0
+	JMP	·LoadUint64(SB)
+
+TEXT ·LoadUint64(SB),7,$0
+	MOVQ	addrptr+0(FP), AX
+	MOVQ	0(AX), AX
+	MOVQ	AX, ret+8(FP)
+	RET
+
 TEXT ·LoadUintptr(SB),7,$0
 	JMP	·LoadPointer(SB)
...
@@ -88,6 +97,15 @@ TEXT ·StoreUint32(SB),7,$0
 	XCHGL AX, 0(BP)
 	RET
 
+TEXT ·StoreInt64(SB),7,$0
+	JMP	·StoreUint64(SB)
+
+TEXT ·StoreUint64(SB),7,$0
+	MOVQ	addrptr+0(FP), BP
+	MOVQ	val+8(FP), AX
+	XCHGQ	AX, 0(BP)
+	RET
+
 TEXT ·StoreUintptr(SB),7,$0
 	JMP	·StorePointer(SB)
...
src/pkg/sync/atomic/asm_arm.s

@@ -79,6 +79,30 @@ add64loop:
 	MOVW R5, rethi+16(FP)
 	RET
 
+TEXT ·armLoadUint64(SB),7,$0
+	BL	fastCheck64<>(SB)
+	MOVW	addrptr+0(FP), R1
+load64loop:
+	LDREXD	(R1), R2	// loads R2 and R3
+	STREXD	R2, (R1), R0	// stores R2 and R3
+	CMP	$0, R0
+	BNE	load64loop
+	MOVW	R2, vallo+4(FP)
+	MOVW	R3, valhi+8(FP)
+	RET
+
+TEXT ·armStoreUint64(SB),7,$0
+	BL	fastCheck64<>(SB)
+	MOVW	addrptr+0(FP), R1
+	MOVW	vallo+4(FP), R2
+	MOVW	valhi+8(FP), R3
+store64loop:
+	LDREXD	(R1), R4	// loads R4 and R5
+	STREXD	R2, (R1), R0	// stores R2 and R3
+	CMP	$0, R0
+	BNE	store64loop
+	RET
+
 // Check for broken 64-bit LDREXD as found in QEMU.
 // LDREXD followed by immediate STREXD should succeed.
 // If it fails, try a few times just to be sure (maybe our thread got
...
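LDREXD/STREXD are ARM's load-exclusive/store-exclusive pair: armLoadUint64 stores the just-loaded doubleword straight back, and that store succeeds only if no other writer touched the location in between, which is what makes the two-word read atomic. The same idea can be modeled at the Go level with a compare-and-swap loop (CompareAndSwapUint64 predates this commit); this is an illustrative model of the semantics, not the commit's implementation:

	package main

	import (
		"fmt"
		"sync/atomic"
	)

	// load64 models load64loop: read, then conditionally "store back" the
	// same value; a successful store proves the read was a consistent snapshot.
	func load64(addr *uint64) uint64 {
		for {
			v := *addr // a plain read like this may be torn on 32-bit targets
			if atomic.CompareAndSwapUint64(addr, v, v) {
				return v // CAS matched, so v really was in memory atomically
			}
		}
	}

	func main() {
		x := uint64(1)<<32 | 1
		fmt.Printf("%#x\n", load64(&x)) // 0x100000001
	}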
src/pkg/sync/atomic/asm_linux_arm.s

@@ -100,6 +100,12 @@ loadloop1:
 	MOVW R1, val+4(FP)
 	RET
 
+TEXT ·LoadInt64(SB),7,$0
+	B	·armLoadUint64(SB)
+
+TEXT ·LoadUint64(SB),7,$0
+	B	·armLoadUint64(SB)
+
 TEXT ·LoadUintptr(SB),7,$0
 	B	·LoadUint32(SB)
...
@@ -118,6 +124,12 @@ storeloop1:
 	BCC storeloop1
 	RET
 
+TEXT ·StoreInt64(SB),7,$0
+	B	·armStoreUint64(SB)
+
+TEXT ·StoreUint64(SB),7,$0
+	B	·armStoreUint64(SB)
+
 TEXT ·StoreUintptr(SB),7,$0
 	B	·StoreUint32(SB)
...
src/pkg/sync/atomic/atomic_test.go

@@ -379,6 +379,54 @@ func TestLoadUint32(t *testing.T) {
 	}
 }
 
+func TestLoadInt64(t *testing.T) {
+	if test64err != nil {
+		t.Logf("Skipping 64-bit tests: %v", test64err)
+		return
+	}
+	var x struct {
+		before int64
+		i      int64
+		after  int64
+	}
+	x.before = magic64
+	x.after = magic64
+	for delta := int64(1); delta+delta > delta; delta += delta {
+		k := LoadInt64(&x.i)
+		if k != x.i {
+			t.Fatalf("delta=%d i=%d k=%d", delta, x.i, k)
+		}
+		x.i += delta
+	}
+	if x.before != magic64 || x.after != magic64 {
+		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, uint64(magic64), uint64(magic64))
+	}
+}
+
+func TestLoadUint64(t *testing.T) {
+	if test64err != nil {
+		t.Logf("Skipping 64-bit tests: %v", test64err)
+		return
+	}
+	var x struct {
+		before uint64
+		i      uint64
+		after  uint64
+	}
+	x.before = magic64
+	x.after = magic64
+	for delta := uint64(1); delta+delta > delta; delta += delta {
+		k := LoadUint64(&x.i)
+		if k != x.i {
+			t.Fatalf("delta=%d i=%d k=%d", delta, x.i, k)
+		}
+		x.i += delta
+	}
+	if x.before != magic64 || x.after != magic64 {
+		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, uint64(magic64), uint64(magic64))
+	}
+}
+
 func TestLoadUintptr(t *testing.T) {
 	var x struct {
 		before uintptr
...
@@ -465,6 +513,56 @@ func TestStoreUint32(t *testing.T) {
 	}
 }
 
+func TestStoreInt64(t *testing.T) {
+	if test64err != nil {
+		t.Logf("Skipping 64-bit tests: %v", test64err)
+		return
+	}
+	var x struct {
+		before int64
+		i      int64
+		after  int64
+	}
+	x.before = magic64
+	x.after = magic64
+	v := int64(0)
+	for delta := int64(1); delta+delta > delta; delta += delta {
+		StoreInt64(&x.i, v)
+		if x.i != v {
+			t.Fatalf("delta=%d i=%d v=%d", delta, x.i, v)
+		}
+		v += delta
+	}
+	if x.before != magic64 || x.after != magic64 {
+		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, uint64(magic64), uint64(magic64))
+	}
+}
+
+func TestStoreUint64(t *testing.T) {
+	if test64err != nil {
+		t.Logf("Skipping 64-bit tests: %v", test64err)
+		return
+	}
+	var x struct {
+		before uint64
+		i      uint64
+		after  uint64
+	}
+	x.before = magic64
+	x.after = magic64
+	v := uint64(0)
+	for delta := uint64(1); delta+delta > delta; delta += delta {
+		StoreUint64(&x.i, v)
+		if x.i != v {
+			t.Fatalf("delta=%d i=%d v=%d", delta, x.i, v)
+		}
+		v += delta
+	}
+	if x.before != magic64 || x.after != magic64 {
+		t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, uint64(magic64), uint64(magic64))
+	}
+}
+
 func TestStoreUintptr(t *testing.T) {
 	var x struct {
 		before uintptr
...
@@ -777,7 +875,7 @@ func hammerStoreLoadInt32(t *testing.T, valp unsafe.Pointer) {
 	vlo := v & ((1 << 16) - 1)
 	vhi := v >> 16
 	if vlo != vhi {
-		t.Fatalf("LoadInt32: %#x != %#x", vlo, vhi)
+		t.Fatalf("Int32: %#x != %#x", vlo, vhi)
 	}
 	new := v + 1 + 1<<16
 	if vlo == 1e4 {
...
@@ -792,7 +890,7 @@ func hammerStoreLoadUint32(t *testing.T, valp unsafe.Pointer) {
 	vlo := v & ((1 << 16) - 1)
 	vhi := v >> 16
 	if vlo != vhi {
-		t.Fatalf("LoadUint32: %#x != %#x", vlo, vhi)
+		t.Fatalf("Uint32: %#x != %#x", vlo, vhi)
 	}
 	new := v + 1 + 1<<16
 	if vlo == 1e4 {
...
@@ -801,6 +899,30 @@ func hammerStoreLoadUint32(t *testing.T, valp unsafe.Pointer) {
 	StoreUint32(val, new)
 }
 
+func hammerStoreLoadInt64(t *testing.T, valp unsafe.Pointer) {
+	val := (*int64)(valp)
+	v := LoadInt64(val)
+	vlo := v & ((1 << 32) - 1)
+	vhi := v >> 32
+	if vlo != vhi {
+		t.Fatalf("Int64: %#x != %#x", vlo, vhi)
+	}
+	new := v + 1 + 1<<32
+	StoreInt64(val, new)
+}
+
+func hammerStoreLoadUint64(t *testing.T, valp unsafe.Pointer) {
+	val := (*uint64)(valp)
+	v := LoadUint64(val)
+	vlo := v & ((1 << 32) - 1)
+	vhi := v >> 32
+	if vlo != vhi {
+		t.Fatalf("Uint64: %#x != %#x", vlo, vhi)
+	}
+	new := v + 1 + 1<<32
+	StoreUint64(val, new)
+}
+
 func hammerStoreLoadUintptr(t *testing.T, valp unsafe.Pointer) {
 	val := (*uintptr)(valp)
 	var test64 uint64 = 1 << 50
...
@@ -811,7 +933,7 @@ func hammerStoreLoadUintptr(t *testing.T, valp unsafe.Pointer) {
 	vlo := v & ((1 << 16) - 1)
 	vhi := v >> 16
 	if vlo != vhi {
-		t.Fatalf("LoadUintptr: %#x != %#x", vlo, vhi)
+		t.Fatalf("Uintptr: %#x != %#x", vlo, vhi)
 	}
 	new = v + 1 + 1<<16
 	if vlo == 1e4 {
...
@@ -821,7 +943,7 @@ func hammerStoreLoadUintptr(t *testing.T, valp unsafe.Pointer) {
 	vlo := v & ((1 << 32) - 1)
 	vhi := v >> 32
 	if vlo != vhi {
-		t.Fatalf("LoadUintptr: %#x != %#x", vlo, vhi)
+		t.Fatalf("Uintptr: %#x != %#x", vlo, vhi)
 	}
 	inc := uint64(1 + 1<<32)
 	new = v + uintptr(inc)
...
@@ -839,7 +961,7 @@ func hammerStoreLoadPointer(t *testing.T, valp unsafe.Pointer) {
 	vlo := v & ((1 << 16) - 1)
 	vhi := v >> 16
 	if vlo != vhi {
-		t.Fatalf("LoadUintptr: %#x != %#x", vlo, vhi)
+		t.Fatalf("Pointer: %#x != %#x", vlo, vhi)
 	}
 	new = v + 1 + 1<<16
 	if vlo == 1e4 {
...
@@ -849,7 +971,7 @@ func hammerStoreLoadPointer(t *testing.T, valp unsafe.Pointer) {
 	vlo := v & ((1 << 32) - 1)
 	vhi := v >> 32
 	if vlo != vhi {
-		t.Fatalf("LoadUintptr: %#x != %#x", vlo, vhi)
+		t.Fatalf("Pointer: %#x != %#x", vlo, vhi)
 	}
 	inc := uint64(1 + 1<<32)
 	new = v + uintptr(inc)
...
@@ -858,8 +980,12 @@ func hammerStoreLoadPointer(t *testing.T, valp unsafe.Pointer) {
 }
 
 func TestHammerStoreLoad(t *testing.T) {
-	tests := [...]func(*testing.T, unsafe.Pointer){hammerStoreLoadInt32, hammerStoreLoadUint32,
-		hammerStoreLoadUintptr, hammerStoreLoadPointer}
+	var tests []func(*testing.T, unsafe.Pointer)
+	tests = append(tests, hammerStoreLoadInt32, hammerStoreLoadUint32,
+		hammerStoreLoadUintptr, hammerStoreLoadPointer)
+	if test64err == nil {
+		tests = append(tests, hammerStoreLoadInt64, hammerStoreLoadUint64)
+	}
 	n := int(1e6)
 	if testing.Short() {
 		n = int(1e4)
...
@@ -883,7 +1009,7 @@ func TestHammerStoreLoad(t *testing.T) {
 	}
 }
 
-func TestStoreLoadSeqCst(t *testing.T) {
+func TestStoreLoadSeqCst32(t *testing.T) {
 	defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(4))
 	N := int32(1e3)
 	if testing.Short() {
...
@@ -898,13 +1024,54 @@ func TestStoreLoadSeqCst(t *testing.T) {
 		for i := int32(1); i < N; i++ {
 			StoreInt32(&X[me], i)
 			my := LoadInt32(&X[he])
-			ack[me][i%3] = my
-			for w := 1; ack[he][i%3] == -1; w++ {
+			StoreInt32(&ack[me][i%3], my)
+			for w := 1; LoadInt32(&ack[he][i%3]) == -1; w++ {
 				if w%1000 == 0 {
 					runtime.Gosched()
 				}
 			}
-			his := ack[he][i%3]
+			his := LoadInt32(&ack[he][i%3])
 			if (my != i && my != i-1) || (his != i && his != i-1) {
 				t.Fatalf("invalid values: %d/%d (%d)", my, his, i)
 			}
 			if my != i && his != i {
 				t.Fatalf("store/load are not sequentially consistent: %d/%d (%d)", my, his, i)
 			}
 			ack[me][(i-1)%3] = -1
...
+
+func TestStoreLoadSeqCst64(t *testing.T) {
+	if test64err != nil {
+		t.Logf("Skipping 64-bit tests: %v", test64err)
+		return
+	}
+	defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(4))
+	N := int64(1e3)
+	if testing.Short() {
+		N = int64(1e2)
+	}
+	c := make(chan bool, 2)
+	X := [2]int64{}
+	ack := [2][3]int64{{-1, -1, -1}, {-1, -1, -1}}
+	for p := 0; p < 2; p++ {
+		go func(me int) {
+			he := 1 - me
+			for i := int64(1); i < N; i++ {
+				StoreInt64(&X[me], i)
+				my := LoadInt64(&X[he])
+				StoreInt64(&ack[me][i%3], my)
+				for w := 1; LoadInt64(&ack[he][i%3]) == -1; w++ {
+					if w%1000 == 0 {
+						runtime.Gosched()
+					}
+				}
+				his := LoadInt64(&ack[he][i%3])
+				if (my != i && my != i-1) || (his != i && his != i-1) {
+					t.Fatalf("invalid values: %d/%d (%d)", my, his, i)
+				}
+				if my != i && his != i {
+					t.Fatalf("store/load are not sequentially consistent: %d/%d (%d)", my, his, i)
+				}
+				ack[me][(i-1)%3] = -1
+			}
+			c <- true
+		}(p)
+	}
+	<-c
+	<-c
+}
@@ -919,3 +1086,91 @@ func TestStoreLoadSeqCst(t *testing.T) {
 	<-c
 	<-c
 }
+
+func TestStoreLoadRelAcq32(t *testing.T) {
+	defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(4))
+	N := int32(1e3)
+	if testing.Short() {
+		N = int32(1e2)
+	}
+	c := make(chan bool, 2)
+	type Data struct {
+		signal int32
+		pad1   [128]int8
+		data1  int32
+		pad2   [128]int8
+		data2  float32
+	}
+	var X Data
+	for p := int32(0); p < 2; p++ {
+		go func(p int32) {
+			for i := int32(1); i < N; i++ {
+				if (i+p)%2 == 0 {
+					X.data1 = i
+					X.data2 = float32(i)
+					StoreInt32(&X.signal, i)
+				} else {
+					for w := 1; LoadInt32(&X.signal) != i; w++ {
+						if w%1000 == 0 {
+							runtime.Gosched()
+						}
+					}
+					d1 := X.data1
+					d2 := X.data2
+					if d1 != i || d2 != float32(i) {
+						t.Fatalf("incorrect data: %d/%d (%d)", d1, d2, i)
+					}
+				}
+			}
+			c <- true
+		}(p)
+	}
+	<-c
+	<-c
+}
+
+func TestStoreLoadRelAcq64(t *testing.T) {
+	if test64err != nil {
+		t.Logf("Skipping 64-bit tests: %v", test64err)
+		return
+	}
+	defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(4))
+	N := int64(1e3)
+	if testing.Short() {
+		N = int64(1e2)
+	}
+	c := make(chan bool, 2)
+	type Data struct {
+		signal int64
+		pad1   [128]int8
+		data1  int64
+		pad2   [128]int8
+		data2  float64
+	}
+	var X Data
+	for p := int64(0); p < 2; p++ {
+		go func(p int64) {
+			for i := int64(1); i < N; i++ {
+				if (i+p)%2 == 0 {
+					X.data1 = i
+					X.data2 = float64(i)
+					StoreInt64(&X.signal, i)
+				} else {
+					for w := 1; LoadInt64(&X.signal) != i; w++ {
+						if w%1000 == 0 {
+							runtime.Gosched()
+						}
+					}
+					d1 := X.data1
+					d2 := X.data2
+					if d1 != i || d2 != float64(i) {
+						t.Fatalf("incorrect data: %d/%d (%d)", d1, d2, i)
+					}
+				}
+			}
+			c <- true
+		}(p)
+	}
+	<-c
+	<-c
+}
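All of the new Load/Store tests share one loop shape: delta sweeps the powers of two until doubling overflows, so every bit position of the 64-bit value gets exercised. A standalone sketch of the pattern (illustrative, not from the commit):

	package main

	import "fmt"

	func main() {
		n := 0
		for delta := uint64(1); delta+delta > delta; delta += delta {
			n++ // delta = 1, 2, 4, ..., 1<<62; the condition fails once delta+delta overflows
		}
		fmt.Println(n) // 63
	}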
src/pkg/sync/atomic/doc.go

@@ -28,7 +28,7 @@ import (
 // BUG(rsc): On ARM, the 64-bit functions use instructions unavailable before ARM 11.
 //
-// On x86-32, the 64-bit functions use instructions unavailable before the Pentium.
+// On x86-32, the 64-bit functions use instructions unavailable before the Pentium MMX.
 
 // CompareAndSwapInt32 executes the compare-and-swap operation for an int32 value.
 func CompareAndSwapInt32(val *int32, old, new int32) (swapped bool)
...
@@ -66,9 +66,15 @@ func AddUintptr(val *uintptr, delta uintptr) (new uintptr)
 // LoadInt32 atomically loads *addr.
 func LoadInt32(addr *int32) (val int32)
 
+// LoadInt64 atomically loads *addr.
+func LoadInt64(addr *int64) (val int64)
+
 // LoadUint32 atomically loads *addr.
 func LoadUint32(addr *uint32) (val uint32)
 
+// LoadUint64 atomically loads *addr.
+func LoadUint64(addr *uint64) (val uint64)
+
 // LoadUintptr atomically loads *addr.
 func LoadUintptr(addr *uintptr) (val uintptr)
...
@@ -78,9 +84,15 @@ func LoadPointer(addr *unsafe.Pointer) (val unsafe.Pointer)
 // StoreInt32 atomically stores val into *addr.
 func StoreInt32(addr *int32, val int32)
 
+// StoreInt64 atomically stores val into *addr.
+func StoreInt64(addr *int64, val int64)
+
 // StoreUint32 atomically stores val into *addr.
 func StoreUint32(addr *uint32, val uint32)
 
+// StoreUint64 atomically stores val into *addr.
+func StoreUint64(addr *uint64, val uint64)
+
 // StoreUintptr atomically stores val into *addr.
 func StoreUintptr(addr *uintptr, val uintptr)
...
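The new TestStoreLoadRelAcq tests encode the expectation that an atomic Store acts as a release and an atomic Load as an acquire. Under that assumption, a minimal publish/consume sketch (illustrative only, not part of the commit; the tests additionally pad their fields onto separate cache lines, omitted here):

	package main

	import (
		"fmt"
		"runtime"
		"sync/atomic"
	)

	type payload struct {
		signal int64 // first field, so it stays 64-bit aligned on 32-bit targets
		data   int64
	}

	func main() {
		var x payload
		go func() {
			x.data = 42                     // plain write, published by...
			atomic.StoreInt64(&x.signal, 1) // ...the atomic store (release)
		}()
		for atomic.LoadInt64(&x.signal) != 1 { // atomic load (acquire)
			runtime.Gosched()
		}
		fmt.Println(x.data) // 42: the data write is visible once the signal is
	}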