atomic_mipsx.go
1 // Copyright 2016 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
4
5 //go:build mips || mipsle
6
7 // Export some functions via linkname to assembly in sync/atomic.
8 //
9 //go:linkname Xadd64
10 //go:linkname Xchg64
11 //go:linkname Cas64
12 //go:linkname Load64
13 //go:linkname Store64
14 //go:linkname Or64
15 //go:linkname And64
16
17 package atomic
18
19 import (
20 "internal/cpu"
21 "unsafe"
22 )
23
// lock is a single global spinlock guarding all emulated 64-bit atomic
// operations on 32-bit MIPS, which lacks native 64-bit ll/sc.
// The pad keeps the lock word on its own cache line to avoid false
// sharing with neighboring data.
// TODO implement lock striping
var lock struct {
	state uint32
	pad   [cpu.CacheLinePadSize - 4]byte
}
29
// spinLock and spinUnlock are implemented in assembly; they acquire and
// release the spinlock whose state word is *state.

//go:noescape
func spinLock(state *uint32)

//go:noescape
func spinUnlock(state *uint32)
35
36 //go:nosplit
37 func lockAndCheck(addr *uint64) {
38 // ensure 8-byte alignment
39 if uintptr(unsafe.Pointer(addr))&7 != 0 {
40 panicUnaligned()
41 }
42 // force dereference before taking lock
43 _ = *addr
44
45 spinLock(&lock.state)
46 }
47
// unlock releases the global spinlock taken by lockAndCheck.
//
//go:nosplit
func unlock() {
	spinUnlock(&lock.state)
}
52
53 //go:nosplit
54 func Xadd64(addr *uint64, delta int64) (new uint64) {
55 lockAndCheck(addr)
56
57 new = *addr + uint64(delta)
58 *addr = new
59
60 unlock()
61 return
62 }
63
64 //go:nosplit
65 func Xchg64(addr *uint64, new uint64) (old uint64) {
66 lockAndCheck(addr)
67
68 old = *addr
69 *addr = new
70
71 unlock()
72 return
73 }
74
75 //go:nosplit
76 func Cas64(addr *uint64, old, new uint64) (swapped bool) {
77 lockAndCheck(addr)
78
79 if (*addr) == old {
80 *addr = new
81 unlock()
82 return true
83 }
84
85 unlock()
86 return false
87 }
88
89 //go:nosplit
90 func Load64(addr *uint64) (val uint64) {
91 lockAndCheck(addr)
92
93 val = *addr
94
95 unlock()
96 return
97 }
98
99 //go:nosplit
100 func Store64(addr *uint64, val uint64) {
101 lockAndCheck(addr)
102
103 *addr = val
104
105 unlock()
106 return
107 }
108
109 //go:nosplit
110 func Or64(addr *uint64, val uint64) (old uint64) {
111 for {
112 old = *addr
113 if Cas64(addr, old, old|val) {
114 return old
115 }
116 }
117 }
118
119 //go:nosplit
120 func And64(addr *uint64, val uint64) (old uint64) {
121 for {
122 old = *addr
123 if Cas64(addr, old, old&val) {
124 return old
125 }
126 }
127 }
128
// The remaining operations act on 32-bit (or pointer-width) values and
// are implemented in assembly for this architecture; only their Go
// signatures are declared here.

//go:noescape
func Xadd(ptr *uint32, delta int32) uint32

//go:noescape
func Xadduintptr(ptr *uintptr, delta uintptr) uintptr

//go:noescape
func Xchg(ptr *uint32, new uint32) uint32

//go:noescape
func Xchg8(ptr *uint8, new uint8) uint8

//go:noescape
func Xchguintptr(ptr *uintptr, new uintptr) uintptr

//go:noescape
func Load(ptr *uint32) uint32

//go:noescape
func Load8(ptr *uint8) uint8

// NO go:noescape annotation; *ptr escapes if result escapes (#31525)
func Loadp(ptr unsafe.Pointer) unsafe.Pointer

//go:noescape
func LoadAcq(ptr *uint32) uint32

//go:noescape
func LoadAcquintptr(ptr *uintptr) uintptr

//go:noescape
func And8(ptr *uint8, val uint8)

//go:noescape
func Or8(ptr *uint8, val uint8)

//go:noescape
func And(ptr *uint32, val uint32)

//go:noescape
func Or(ptr *uint32, val uint32)

//go:noescape
func And32(ptr *uint32, val uint32) uint32

//go:noescape
func Or32(ptr *uint32, val uint32) uint32

//go:noescape
func Anduintptr(ptr *uintptr, val uintptr) uintptr

//go:noescape
func Oruintptr(ptr *uintptr, val uintptr) uintptr

//go:noescape
func Store(ptr *uint32, val uint32)

//go:noescape
func Store8(ptr *uint8, val uint8)

// NO go:noescape annotation; see atomic_pointer.go.
func StorepNoWB(ptr unsafe.Pointer, val unsafe.Pointer)

//go:noescape
func StoreRel(ptr *uint32, val uint32)

//go:noescape
func StoreReluintptr(ptr *uintptr, val uintptr)

//go:noescape
func CasRel(addr *uint32, old, new uint32) bool
200