/* rte_atomic.h */
#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

#include <stdint.h>

#ifdef __cplusplus
extern "C" {
#endif

#ifndef asm
#define asm __asm__
#endif

/*
 * On a single-core (uniprocessor) build the "lock" prefix could be omitted;
 * this copy hardcodes the multi-processor case, so the prefix is always
 * emitted before the read-modify-write instructions below.
 */
#if 0 /* uniprocessor build */
#define MPLOCKED              /**< No need to insert MP lock prefix. */
#else
#define MPLOCKED "lock ; "    /**< Insert MP lock prefix. */
#endif
/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * The atomic counter structure.
 */
typedef struct {
    volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;
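
/**
 * Atomic compare and set: if (*dst == exp) then *dst = src, as one
 * atomic operation. Implemented with "lock cmpxchgl"; cmpxchg compares
 * against eax, which is why exp is bound to the "a" constraint below.
 *
 * @return Non-zero on success (the store happened); 0 on failure.
 */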
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
    uint8_t res;

    asm volatile(
            MPLOCKED
            "cmpxchgl %[src], %[dst];"
            "sete %[res];"
            : [res] "=a" (res),   /* output */
              [dst] "=m" (*dst)
            : [src] "r" (src),    /* input */
              "a" (exp),
              "m" (*dst)
            : "memory");          /* clobber list */
    return res;
}
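
/**
 * Atomically exchange *dst with val and return the previous value.
 * On x86, "xchg" with a memory operand locks the bus implicitly, so the
 * MPLOCKED prefix is redundant here but harmless.
 */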
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
    asm volatile(
            MPLOCKED
            "xchgl %0, %1;"
            : "=r" (val), "=m" (*dst)
            : "0" (val), "m" (*dst)
            : "memory");          /* clobber list */
    return val;
}
/** Initialize the counter to zero. */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
    v->cnt = 0;
}

/** Atomically increment the counter by one. */
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
    asm volatile(
            MPLOCKED
            "incl %[cnt]"
            : [cnt] "=m" (v->cnt) /* output */
            : "m" (v->cnt)        /* input */
            );
}

/** Atomically decrement the counter by one. */
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
    asm volatile(
            MPLOCKED
            "decl %[cnt]"
            : [cnt] "=m" (v->cnt) /* output */
            : "m" (v->cnt)        /* input */
            );
}

/** Read the counter (a plain load; atomic for an aligned 32-bit word). */
static inline int
rte_atomic32_read(rte_atomic32_t *v)
{
    return v->cnt;
}
/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * The atomic counter structure.
 */
typedef struct {
    volatile int64_t cnt; /**< Internal counter value. */
} rte_atomic64_t;
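
/**
 * Atomic 64-bit compare and set: if (*dst == exp) then *dst = src.
 * Uses "lock cmpxchgq" and therefore requires an x86-64 target.
 *
 * @return Non-zero on success; 0 on failure.
 */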
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
    uint8_t res;

    asm volatile(
            MPLOCKED
            "cmpxchgq %[src], %[dst];"
            "sete %[res];"
            : [res] "=a" (res),   /* output */
              [dst] "=m" (*dst)
            : [src] "r" (src),    /* input */
              "a" (exp),
              "m" (*dst)
            : "memory");          /* clobber list */
    return res;
}
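
/**
 * Atomically exchange *dst with val and return the previous value
 * (64-bit variant of rte_atomic32_exchange).
 */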
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
    asm volatile(
            MPLOCKED
            "xchgq %0, %1;"
            : "=r" (val), "=m" (*dst)
            : "0" (val), "m" (*dst)
            : "memory");          /* clobber list */
    return val;
}
/** Initialize the counter to zero. */
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
    v->cnt = 0;
}

/** Read the counter (plain load; atomic for an aligned 64-bit word on x86-64). */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
    return v->cnt;
}

/** Set the counter (plain store; atomic for an aligned 64-bit word on x86-64). */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
    v->cnt = new_value;
}
/** Atomically add inc to the counter. */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
    asm volatile(
            MPLOCKED
            "addq %[inc], %[cnt]"
            : [cnt] "=m" (v->cnt) /* output */
            : [inc] "ir" (inc),   /* input */
              "m" (v->cnt)
            );
}

/** Atomically subtract dec from the counter. */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
    asm volatile(
            MPLOCKED
            "subq %[dec], %[cnt]"
            : [cnt] "=m" (v->cnt) /* output */
            : [dec] "ir" (dec),   /* input */
              "m" (v->cnt)
            );
}

/** Atomically increment the counter by one. */
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
    asm volatile(
            MPLOCKED
            "incq %[cnt]"
            : [cnt] "=m" (v->cnt) /* output */
            : "m" (v->cnt)        /* input */
            );
}

/** Atomically decrement the counter by one. */
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
    asm volatile(
            MPLOCKED
            "decq %[cnt]"
            : [cnt] "=m" (v->cnt) /* output */
            : "m" (v->cnt)        /* input */
            );
}
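
/**
 * Atomically add inc to the counter and return the post-add value.
 * "xaddq" stores the old counter value into the prev register, so the
 * new value is reconstructed as prev + inc after the exchange-add.
 */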
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
    int64_t prev = inc;

    asm volatile(
            MPLOCKED
            "xaddq %[prev], %[cnt]"
            : [prev] "+r" (prev), /* output */
              [cnt] "=m" (v->cnt)
            : "m" (v->cnt)        /* input */
            );
    return prev + inc;
}

/** Atomically subtract dec from the counter and return the new value. */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
    return rte_atomic64_add_return(v, -dec);
}
#ifdef __cplusplus
}
#endif

#endif /* _RTE_ATOMIC_H_ */