DPDK 25.03.0
rte_rwlock.h
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_RWLOCK_H_
#define _RTE_RWLOCK_H_

#include <errno.h>
#include <stdbool.h>

#include <rte_branch_prediction.h>
#include <rte_common.h>
#include <rte_lock_annotations.h>
#include <rte_pause.h>
#include <rte_stdatomic.h>

#ifdef __cplusplus
extern "C" {
#endif

#define RTE_RWLOCK_WAIT  0x1 /* Writer is waiting */
#define RTE_RWLOCK_WRITE 0x2 /* Writer has the lock */
#define RTE_RWLOCK_MASK  (RTE_RWLOCK_WAIT | RTE_RWLOCK_WRITE)
                             /* Writer is waiting or has lock */
#define RTE_RWLOCK_READ  0x4 /* Reader increment */

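/*
 * Layout note (added commentary, not part of the original header): the whole
 * lock state lives in one 32-bit counter. Bit 0 (RTE_RWLOCK_WAIT) marks a
 * waiting writer, bit 1 (RTE_RWLOCK_WRITE) marks a writer holding the lock,
 * and each reader adds RTE_RWLOCK_READ (0x4), so the reader count occupies
 * bits 2..31. For example, three readers and no writer give
 * cnt == 3 * RTE_RWLOCK_READ == 12, while any value with
 * (cnt & RTE_RWLOCK_MASK) != 0 tells new readers to keep out.
 */
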
typedef struct __rte_capability("rwlock") {
        RTE_ATOMIC(int32_t) cnt;
} rte_rwlock_t;

#define RTE_RWLOCK_INITIALIZER { 0 }

static inline void
rte_rwlock_init(rte_rwlock_t *rwl)
{
        rwl->cnt = 0;
}

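/*
 * Usage sketch (illustrative addition, names are hypothetical): a lock with
 * static storage duration can be initialized at definition time with
 * RTE_RWLOCK_INITIALIZER, while a lock embedded in another structure is set
 * up with rte_rwlock_init() before first use.
 *
 * @code
 * static rte_rwlock_t table_lock = RTE_RWLOCK_INITIALIZER;
 *
 * struct flow_table {
 *         rte_rwlock_t lock;
 *         uint32_t nb_entries;
 * };
 *
 * static void
 * flow_table_setup(struct flow_table *t)
 * {
 *         rte_rwlock_init(&t->lock);
 * }
 * @endcode
 */
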
static inline void
rte_rwlock_read_lock(rte_rwlock_t *rwl)
        __rte_acquire_shared_capability(rwl)
        __rte_no_thread_safety_analysis
{
        int32_t x;

        while (1) {
                /* Wait while writer is present or pending */
                while (rte_atomic_load_explicit(&rwl->cnt, rte_memory_order_relaxed)
                       & RTE_RWLOCK_MASK)
                        rte_pause();

                /* Try to get read lock */
                x = rte_atomic_fetch_add_explicit(&rwl->cnt, RTE_RWLOCK_READ,
                        rte_memory_order_acquire) + RTE_RWLOCK_READ;

                /* If no writer, then acquire was successful */
                if (likely(!(x & RTE_RWLOCK_MASK)))
                        return;

                /* Lost race with writer, back out the change. */
                rte_atomic_fetch_sub_explicit(&rwl->cnt, RTE_RWLOCK_READ,
                        rte_memory_order_relaxed);
        }
}

static inline int
rte_rwlock_read_trylock(rte_rwlock_t *rwl)
        __rte_try_acquire_shared_capability(false, rwl)
        __rte_no_thread_safety_analysis
{
        int32_t x;

        x = rte_atomic_load_explicit(&rwl->cnt, rte_memory_order_relaxed);

        /* fail if write lock is held or writer is pending */
        if (x & RTE_RWLOCK_MASK)
                return -EBUSY;

        /* Try to get read lock */
        x = rte_atomic_fetch_add_explicit(&rwl->cnt, RTE_RWLOCK_READ,
                rte_memory_order_acquire) + RTE_RWLOCK_READ;

        /* Back out if writer raced in */
        if (unlikely(x & RTE_RWLOCK_MASK)) {
                rte_atomic_fetch_sub_explicit(&rwl->cnt, RTE_RWLOCK_READ,
                        rte_memory_order_release);

                return -EBUSY;
        }
        return 0;
}

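/*
 * Usage sketch (illustrative addition, names are hypothetical):
 * rte_rwlock_read_trylock() returns 0 on success and -EBUSY when a writer
 * holds or is waiting for the lock, so a caller that must not block can
 * fall back to other work instead of spinning.
 *
 * @code
 * if (rte_rwlock_read_trylock(&table_lock) == 0) {
 *         lookup_entry(key);                    // read-side work
 *         rte_rwlock_read_unlock(&table_lock);
 * } else {
 *         handle_busy();                        // lock busy, don't spin
 * }
 * @endcode
 */
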
static inline void
rte_rwlock_read_unlock(rte_rwlock_t *rwl)
        __rte_release_shared_capability(rwl)
        __rte_no_thread_safety_analysis
{
        rte_atomic_fetch_sub_explicit(&rwl->cnt, RTE_RWLOCK_READ, rte_memory_order_release);
}

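/*
 * Usage sketch (illustrative addition, names are hypothetical): the blocking
 * read-side pattern. Any number of lcores may hold the read lock at once,
 * and each successful acquisition must be paired with exactly one
 * rte_rwlock_read_unlock().
 *
 * @code
 * rte_rwlock_read_lock(&table_lock);
 * stats = lookup_stats(key);          // shared data is stable here
 * rte_rwlock_read_unlock(&table_lock);
 * @endcode
 */
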
static inline int
rte_rwlock_write_trylock(rte_rwlock_t *rwl)
        __rte_try_acquire_capability(false, rwl)
        __rte_no_thread_safety_analysis
{
        int32_t x;

        x = rte_atomic_load_explicit(&rwl->cnt, rte_memory_order_relaxed);
        if (x < RTE_RWLOCK_WRITE &&
            rte_atomic_compare_exchange_weak_explicit(&rwl->cnt, &x, x + RTE_RWLOCK_WRITE,
                rte_memory_order_acquire, rte_memory_order_relaxed))
                return 0;
        else
                return -EBUSY;
}

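/*
 * Usage sketch (illustrative addition, names are hypothetical):
 * rte_rwlock_write_trylock() succeeds only when no writer owns the lock and
 * no readers hold it, returning -EBUSY otherwise.
 *
 * @code
 * if (rte_rwlock_write_trylock(&table_lock) == 0) {
 *         expire_stale_entries();               // exclusive access
 *         rte_rwlock_write_unlock(&table_lock);
 * }
 * @endcode
 */
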
static inline void
rte_rwlock_write_lock(rte_rwlock_t *rwl)
        __rte_acquire_capability(rwl)
        __rte_no_thread_safety_analysis
{
        int32_t x;

        while (1) {
                x = rte_atomic_load_explicit(&rwl->cnt, rte_memory_order_relaxed);

                /* No readers or writers? */
                if (likely(x < RTE_RWLOCK_WRITE)) {
                        /* Turn off RTE_RWLOCK_WAIT, turn on RTE_RWLOCK_WRITE */
                        if (rte_atomic_compare_exchange_weak_explicit(&rwl->cnt, &x,
                                RTE_RWLOCK_WRITE, rte_memory_order_acquire,
                                rte_memory_order_relaxed))
                                return;
                }

                /* Turn on writer wait bit */
                if (!(x & RTE_RWLOCK_WAIT))
                        rte_atomic_fetch_or_explicit(&rwl->cnt, RTE_RWLOCK_WAIT,
                                rte_memory_order_relaxed);

                /* Wait until no readers before trying again */
                while (rte_atomic_load_explicit(&rwl->cnt, rte_memory_order_relaxed)
                       > RTE_RWLOCK_WAIT)
                        rte_pause();
        }
}

static inline void
rte_rwlock_write_unlock(rte_rwlock_t *rwl)
        __rte_release_capability(rwl)
        __rte_no_thread_safety_analysis
{
        rte_atomic_fetch_sub_explicit(&rwl->cnt, RTE_RWLOCK_WRITE, rte_memory_order_release);
}

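/*
 * Usage sketch (illustrative addition, names are hypothetical): the blocking
 * write-side pattern. The writer raises the wait bit, spins until the
 * readers drain, and then holds the lock exclusively until
 * rte_rwlock_write_unlock().
 *
 * @code
 * rte_rwlock_write_lock(&table_lock);
 * insert_entry(key, value);           // no readers or other writers here
 * rte_rwlock_write_unlock(&table_lock);
 * @endcode
 */
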
static inline int
rte_rwlock_write_is_locked(rte_rwlock_t *rwl)
{
        if (rte_atomic_load_explicit(&rwl->cnt, rte_memory_order_relaxed) & RTE_RWLOCK_WRITE)
                return 1;

        return 0;
}

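/*
 * Usage sketch (illustrative addition): because the load above is relaxed,
 * the result is only a snapshot and may be stale by the time the caller
 * acts on it; it is best suited to assertions and diagnostics.
 *
 * @code
 * RTE_ASSERT(rte_rwlock_write_is_locked(&table_lock));
 * @endcode
 */
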
static inline void
rte_rwlock_read_lock_tm(rte_rwlock_t *rwl)
        __rte_acquire_shared_capability(rwl);

static inline void
rte_rwlock_read_unlock_tm(rte_rwlock_t *rwl)
        __rte_release_shared_capability(rwl);

static inline void
rte_rwlock_write_lock_tm(rte_rwlock_t *rwl)
        __rte_acquire_capability(rwl);

static inline void
rte_rwlock_write_unlock_tm(rte_rwlock_t *rwl)
        __rte_release_capability(rwl);

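/*
 * Usage sketch (illustrative addition, names are hypothetical): the _tm
 * variants try to run the critical section as a hardware memory transaction
 * and fall back to taking the plain lock when that fails or is unavailable.
 * They are used exactly like the non-_tm functions, but a lock taken with a
 * _tm call must also be released with the matching _tm call.
 *
 * @code
 * rte_rwlock_read_lock_tm(&table_lock);
 * stats = lookup_stats(key);
 * rte_rwlock_read_unlock_tm(&table_lock);
 * @endcode
 */
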
#ifdef __cplusplus
}
#endif

#endif /* _RTE_RWLOCK_H_ */