--- rcu_queue.h    (c1e667d2598b9b3ce62b8e89ed22dd38dfe9f57f)
+++ rcu_queue.h    (d73415a315471ac0b127ed3fad45c8ec5d711de1)
 #ifndef QEMU_RCU_QUEUE_H
 #define QEMU_RCU_QUEUE_H

 /*
  * rcu_queue.h
  *
  * RCU-friendly versions of the queue.h primitives.
  *
[... 22 unchanged lines hidden ...]

 #ifdef __cplusplus
 extern "C" {
 #endif


 /*
  * List access methods.
  */
-#define QLIST_EMPTY_RCU(head) (atomic_read(&(head)->lh_first) == NULL)
-#define QLIST_FIRST_RCU(head) (atomic_rcu_read(&(head)->lh_first))
-#define QLIST_NEXT_RCU(elm, field) (atomic_rcu_read(&(elm)->field.le_next))
+#define QLIST_EMPTY_RCU(head) (qatomic_read(&(head)->lh_first) == NULL)
+#define QLIST_FIRST_RCU(head) (qatomic_rcu_read(&(head)->lh_first))
+#define QLIST_NEXT_RCU(elm, field) (qatomic_rcu_read(&(elm)->field.le_next))

 /*
  * List functions.
  */


 /*
- * The difference between atomic_read/set and atomic_rcu_read/set
+ * The difference between qatomic_read/set and qatomic_rcu_read/set
  * is in the including of a read/write memory barrier to the volatile
  * access. atomic_rcu_* macros include the memory barrier, the
  * plain atomic macros do not. Therefore, it should be correct to
  * issue a series of reads or writes to the same element using only
  * the atomic_* macro, until the last read or write, which should be
  * atomic_rcu_* to introduce a read or write memory barrier as
  * appropriate.
  */
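The rule in the comment above is the classic RCU publication pattern: initialise every field of a new element with plain stores, and make only the final, publishing store a qatomic_rcu_set(), whose write barrier orders the initialisation before the pointer becomes visible. A minimal sketch of the pattern outside the list macros (struct Foo, global_foo, publish_foo() and use_foo() are hypothetical; g_new0() is the glib allocator QEMU uses):

    #include "qemu/osdep.h"
    #include "qemu/atomic.h"

    struct Foo {
        int a, b;
    };

    static struct Foo *global_foo;       /* assumed shared pointer */

    /* Writer: plain stores to initialise, one qatomic_rcu_set() to publish. */
    static void publish_foo(void)
    {
        struct Foo *f = g_new0(struct Foo, 1);
        f->a = 1;                        /* not yet visible to readers */
        f->b = 2;
        qatomic_rcu_set(&global_foo, f); /* write barrier, then store: readers
                                          * can only see a fully built 'f' */
    }

    /* Reader: qatomic_rcu_read() supplies the matching read barrier. */
    static int use_foo(void)
    {
        struct Foo *f = qatomic_rcu_read(&global_foo);
        return f ? f->a + f->b : 0;
    }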
 /* Upon publication of the listelm->next value, list readers
  * will see the new node when following next pointers from
  * antecedent nodes, but may not see the new node when following
  * prev pointers from subsequent nodes until after the RCU grace
  * period expires.
  * see linux/include/rculist.h __list_add_rcu(new, prev, next)
  */
 #define QLIST_INSERT_AFTER_RCU(listelm, elm, field) do { \
     (elm)->field.le_next = (listelm)->field.le_next; \
     (elm)->field.le_prev = &(listelm)->field.le_next; \
-    atomic_rcu_set(&(listelm)->field.le_next, (elm)); \
+    qatomic_rcu_set(&(listelm)->field.le_next, (elm)); \
     if ((elm)->field.le_next != NULL) { \
         (elm)->field.le_next->field.le_prev = \
             &(elm)->field.le_next; \
     } \
 } while (/*CONSTCOND*/0)

 /* Upon publication of the listelm->prev->next value, list
  * readers will see the new element when following prev pointers
  * from subsequent elements, but may not see the new element
  * when following next pointers from antecedent elements
  * until after the RCU grace period expires.
  */
 #define QLIST_INSERT_BEFORE_RCU(listelm, elm, field) do { \
     (elm)->field.le_prev = (listelm)->field.le_prev; \
     (elm)->field.le_next = (listelm); \
-    atomic_rcu_set((listelm)->field.le_prev, (elm)); \
+    qatomic_rcu_set((listelm)->field.le_prev, (elm)); \
     (listelm)->field.le_prev = &(elm)->field.le_next; \
 } while (/*CONSTCOND*/0)

 /* Upon publication of the head->first value, list readers
  * will see the new element when following the head, but may
  * not see the new element when following prev pointers from
  * subsequent elements until after the RCU grace period has
  * expired.
  */
 #define QLIST_INSERT_HEAD_RCU(head, elm, field) do { \
     (elm)->field.le_prev = &(head)->lh_first; \
     (elm)->field.le_next = (head)->lh_first; \
-    atomic_rcu_set((&(head)->lh_first), (elm)); \
+    qatomic_rcu_set((&(head)->lh_first), (elm)); \
     if ((elm)->field.le_next != NULL) { \
         (elm)->field.le_next->field.le_prev = \
             &(elm)->field.le_next; \
     } \
 } while (/*CONSTCOND*/0)


 /* prior to publication of the elm->prev->next value, some list
  * readers may still see the removed element when following
  * the antecedent's next pointer.
  */
 #define QLIST_REMOVE_RCU(elm, field) do { \
     if ((elm)->field.le_next != NULL) { \
         (elm)->field.le_next->field.le_prev = \
             (elm)->field.le_prev; \
     } \
-    atomic_set((elm)->field.le_prev, (elm)->field.le_next); \
+    qatomic_set((elm)->field.le_prev, (elm)->field.le_next); \
 } while (/*CONSTCOND*/0)
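QLIST_REMOVE_RCU() only unlinks the element; as the comment notes, a reader already inside its critical section may still be traversing it, so the updater must not reclaim the memory until a grace period has elapsed. A writer-side sketch, assuming an updater lock is held and QEMU's blocking grace-period primitive synchronize_rcu() from qemu/rcu.h (struct Item and delete_item() are hypothetical):

    struct Item {
        QLIST_ENTRY(Item) node;
        int value;
    };

    /* Caller holds the updater lock: RCU removes reader/writer locking,
     * but concurrent writers must still serialise among themselves. */
    static void delete_item(struct Item *it)
    {
        QLIST_REMOVE_RCU(it, node);
        synchronize_rcu();      /* wait for pre-existing readers to finish */
        g_free(it);             /* no reader can still reference 'it' */
    }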
 /* List traversal must occur within an RCU critical section. */
 #define QLIST_FOREACH_RCU(var, head, field) \
-    for ((var) = atomic_rcu_read(&(head)->lh_first); \
+    for ((var) = qatomic_rcu_read(&(head)->lh_first); \
         (var); \
-        (var) = atomic_rcu_read(&(var)->field.le_next))
+        (var) = qatomic_rcu_read(&(var)->field.le_next))

 /* List traversal must occur within an RCU critical section. */
 #define QLIST_FOREACH_SAFE_RCU(var, head, field, next_var) \
-    for ((var) = (atomic_rcu_read(&(head)->lh_first)); \
+    for ((var) = (qatomic_rcu_read(&(head)->lh_first)); \
         (var) && \
-        ((next_var) = atomic_rcu_read(&(var)->field.le_next), 1); \
+        ((next_var) = qatomic_rcu_read(&(var)->field.le_next), 1); \
         (var) = (next_var))
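Both traversal comments above state the reader-side contract: the walk must sit between rcu_read_lock() and rcu_read_unlock() (from qemu/rcu.h), which keep every element reachable during the walk from being reclaimed. The _SAFE variant additionally latches le_next before the loop body runs, so an updater may unlink the current element mid-walk. A reader sketch, reusing the hypothetical struct Item above:

    static QLIST_HEAD(, Item) items = QLIST_HEAD_INITIALIZER(items);

    static int sum_items(void)
    {
        struct Item *it;
        int sum = 0;

        rcu_read_lock();
        QLIST_FOREACH_RCU(it, &items, node) {
            /* 'it' may be concurrently unlinked, but its memory stays
             * valid until rcu_read_unlock(). */
            sum += it->value;
        }
        rcu_read_unlock();
        return sum;
    }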
 /*
  * RCU simple queue
  */

 /* Simple queue access methods */
-#define QSIMPLEQ_EMPTY_RCU(head) (atomic_read(&(head)->sqh_first) == NULL)
-#define QSIMPLEQ_FIRST_RCU(head) atomic_rcu_read(&(head)->sqh_first)
-#define QSIMPLEQ_NEXT_RCU(elm, field) atomic_rcu_read(&(elm)->field.sqe_next)
+#define QSIMPLEQ_EMPTY_RCU(head) \
+    (qatomic_read(&(head)->sqh_first) == NULL)
+#define QSIMPLEQ_FIRST_RCU(head) qatomic_rcu_read(&(head)->sqh_first)
+#define QSIMPLEQ_NEXT_RCU(elm, field) qatomic_rcu_read(&(elm)->field.sqe_next)

 /* Simple queue functions */
 #define QSIMPLEQ_INSERT_HEAD_RCU(head, elm, field) do { \
     (elm)->field.sqe_next = (head)->sqh_first; \
     if ((elm)->field.sqe_next == NULL) { \
         (head)->sqh_last = &(elm)->field.sqe_next; \
     } \
-    atomic_rcu_set(&(head)->sqh_first, (elm)); \
+    qatomic_rcu_set(&(head)->sqh_first, (elm)); \
 } while (/*CONSTCOND*/0)

 #define QSIMPLEQ_INSERT_TAIL_RCU(head, elm, field) do { \
     (elm)->field.sqe_next = NULL; \
-    atomic_rcu_set((head)->sqh_last, (elm)); \
+    qatomic_rcu_set((head)->sqh_last, (elm)); \
     (head)->sqh_last = &(elm)->field.sqe_next; \
 } while (/*CONSTCOND*/0)

 #define QSIMPLEQ_INSERT_AFTER_RCU(head, listelm, elm, field) do { \
     (elm)->field.sqe_next = (listelm)->field.sqe_next; \
     if ((elm)->field.sqe_next == NULL) { \
         (head)->sqh_last = &(elm)->field.sqe_next; \
     } \
-    atomic_rcu_set(&(listelm)->field.sqe_next, (elm)); \
+    qatomic_rcu_set(&(listelm)->field.sqe_next, (elm)); \
 } while (/*CONSTCOND*/0)

 #define QSIMPLEQ_REMOVE_HEAD_RCU(head, field) do { \
-    atomic_set(&(head)->sqh_first, (head)->sqh_first->field.sqe_next); \
+    qatomic_set(&(head)->sqh_first, (head)->sqh_first->field.sqe_next); \
     if ((head)->sqh_first == NULL) { \
         (head)->sqh_last = &(head)->sqh_first; \
     } \
 } while (/*CONSTCOND*/0)

 #define QSIMPLEQ_REMOVE_RCU(head, elm, type, field) do { \
     if ((head)->sqh_first == (elm)) { \
         QSIMPLEQ_REMOVE_HEAD_RCU((head), field); \
     } else { \
         struct type *curr = (head)->sqh_first; \
         while (curr->field.sqe_next != (elm)) { \
             curr = curr->field.sqe_next; \
         } \
-        atomic_set(&curr->field.sqe_next, \
+        qatomic_set(&curr->field.sqe_next, \
                    curr->field.sqe_next->field.sqe_next); \
         if (curr->field.sqe_next == NULL) { \
             (head)->sqh_last = &(curr)->field.sqe_next; \
         } \
     } \
 } while (/*CONSTCOND*/0)

 #define QSIMPLEQ_FOREACH_RCU(var, head, field) \
-    for ((var) = atomic_rcu_read(&(head)->sqh_first); \
+    for ((var) = qatomic_rcu_read(&(head)->sqh_first); \
         (var); \
-        (var) = atomic_rcu_read(&(var)->field.sqe_next))
+        (var) = qatomic_rcu_read(&(var)->field.sqe_next))

 #define QSIMPLEQ_FOREACH_SAFE_RCU(var, head, field, next) \
-    for ((var) = atomic_rcu_read(&(head)->sqh_first); \
-        (var) && ((next) = atomic_rcu_read(&(var)->field.sqe_next), 1); \
+    for ((var) = qatomic_rcu_read(&(head)->sqh_first); \
+        (var) && ((next) = qatomic_rcu_read(&(var)->field.sqe_next), 1); \
         (var) = (next))
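A point worth noticing in the simple-queue macros above: the tail pointer sqh_last is read and written only by the insert/remove operations, i.e. by updaters, while readers follow nothing but sqe_next. That makes QSIMPLEQ usable as an RCU-friendly FIFO where producers serialise on a lock and consumers scan locklessly. A sketch, assuming a QemuMutex from qemu/thread.h and a hypothetical struct Req with a handle_req() consumer:

    struct Req {
        QSIMPLEQ_ENTRY(Req) next;
        /* payload ... */
    };

    static QSIMPLEQ_HEAD(, Req) reqs = QSIMPLEQ_HEAD_INITIALIZER(reqs);
    static QemuMutex reqs_lock;          /* serialises updaters only */

    static void add_req(struct Req *r)
    {
        qemu_mutex_lock(&reqs_lock);
        QSIMPLEQ_INSERT_TAIL_RCU(&reqs, r, next);   /* sqh_last touched
                                                     * under the lock */
        qemu_mutex_unlock(&reqs_lock);
    }

    static void scan_reqs(void)
    {
        struct Req *r;

        rcu_read_lock();
        QSIMPLEQ_FOREACH_RCU(r, &reqs, next) {      /* follows sqe_next only */
            handle_req(r);                          /* hypothetical handler */
        }
        rcu_read_unlock();
    }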
 /*
  * RCU tail queue
  */

 /* Tail queue access methods */
-#define QTAILQ_EMPTY_RCU(head) (atomic_read(&(head)->tqh_first) == NULL)
-#define QTAILQ_FIRST_RCU(head) atomic_rcu_read(&(head)->tqh_first)
-#define QTAILQ_NEXT_RCU(elm, field) atomic_rcu_read(&(elm)->field.tqe_next)
+#define QTAILQ_EMPTY_RCU(head) (qatomic_read(&(head)->tqh_first) == NULL)
+#define QTAILQ_FIRST_RCU(head) qatomic_rcu_read(&(head)->tqh_first)
+#define QTAILQ_NEXT_RCU(elm, field) qatomic_rcu_read(&(elm)->field.tqe_next)

 /* Tail queue functions */
 #define QTAILQ_INSERT_HEAD_RCU(head, elm, field) do { \
     (elm)->field.tqe_next = (head)->tqh_first; \
     if ((elm)->field.tqe_next != NULL) { \
         (head)->tqh_first->field.tqe_circ.tql_prev = \
             &(elm)->field.tqe_circ; \
     } else { \
         (head)->tqh_circ.tql_prev = &(elm)->field.tqe_circ; \
     } \
-    atomic_rcu_set(&(head)->tqh_first, (elm)); \
+    qatomic_rcu_set(&(head)->tqh_first, (elm)); \
     (elm)->field.tqe_circ.tql_prev = &(head)->tqh_circ; \
 } while (/*CONSTCOND*/0)

 #define QTAILQ_INSERT_TAIL_RCU(head, elm, field) do { \
     (elm)->field.tqe_next = NULL; \
     (elm)->field.tqe_circ.tql_prev = (head)->tqh_circ.tql_prev; \
-    atomic_rcu_set(&(head)->tqh_circ.tql_prev->tql_next, (elm)); \
+    qatomic_rcu_set(&(head)->tqh_circ.tql_prev->tql_next, (elm)); \
     (head)->tqh_circ.tql_prev = &(elm)->field.tqe_circ; \
 } while (/*CONSTCOND*/0)

 #define QTAILQ_INSERT_AFTER_RCU(head, listelm, elm, field) do { \
     (elm)->field.tqe_next = (listelm)->field.tqe_next; \
     if ((elm)->field.tqe_next != NULL) { \
         (elm)->field.tqe_next->field.tqe_circ.tql_prev = \
             &(elm)->field.tqe_circ; \
     } else { \
         (head)->tqh_circ.tql_prev = &(elm)->field.tqe_circ; \
     } \
-    atomic_rcu_set(&(listelm)->field.tqe_next, (elm)); \
+    qatomic_rcu_set(&(listelm)->field.tqe_next, (elm)); \
     (elm)->field.tqe_circ.tql_prev = &(listelm)->field.tqe_circ; \
 } while (/*CONSTCOND*/0)

 #define QTAILQ_INSERT_BEFORE_RCU(listelm, elm, field) do { \
     (elm)->field.tqe_circ.tql_prev = (listelm)->field.tqe_circ.tql_prev; \
     (elm)->field.tqe_next = (listelm); \
-    atomic_rcu_set(&(listelm)->field.tqe_circ.tql_prev->tql_next, (elm)); \
+    qatomic_rcu_set(&(listelm)->field.tqe_circ.tql_prev->tql_next, (elm)); \
     (listelm)->field.tqe_circ.tql_prev = &(elm)->field.tqe_circ; \
 } while (/*CONSTCOND*/0)

 #define QTAILQ_REMOVE_RCU(head, elm, field) do { \
     if (((elm)->field.tqe_next) != NULL) { \
         (elm)->field.tqe_next->field.tqe_circ.tql_prev = \
             (elm)->field.tqe_circ.tql_prev; \
     } else { \
         (head)->tqh_circ.tql_prev = (elm)->field.tqe_circ.tql_prev; \
     } \
-    atomic_set(&(elm)->field.tqe_circ.tql_prev->tql_next, (elm)->field.tqe_next); \
+    qatomic_set(&(elm)->field.tqe_circ.tql_prev->tql_next, \
+                (elm)->field.tqe_next); \
     (elm)->field.tqe_circ.tql_prev = NULL; \
 } while (/*CONSTCOND*/0)

 #define QTAILQ_FOREACH_RCU(var, head, field) \
-    for ((var) = atomic_rcu_read(&(head)->tqh_first); \
+    for ((var) = qatomic_rcu_read(&(head)->tqh_first); \
         (var); \
-        (var) = atomic_rcu_read(&(var)->field.tqe_next))
+        (var) = qatomic_rcu_read(&(var)->field.tqe_next))

 #define QTAILQ_FOREACH_SAFE_RCU(var, head, field, next) \
-    for ((var) = atomic_rcu_read(&(head)->tqh_first); \
-        (var) && ((next) = atomic_rcu_read(&(var)->field.tqe_next), 1); \
+    for ((var) = qatomic_rcu_read(&(head)->tqh_first); \
+        (var) && ((next) = qatomic_rcu_read(&(var)->field.tqe_next), 1); \
         (var) = (next))
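The tail queue shows the same split more starkly: the forward tqe_next links are published with RCU barriers, while the circular tqe_circ.tql_prev links are maintained with plain stores, and QTAILQ_REMOVE_RCU even NULLs the removed element's tql_prev. The prev pointers are thus updater-only bookkeeping, and this header provides no reverse RCU iterator. A removal sketch using deferred reclamation instead of blocking, assuming call_rcu1() and struct rcu_head from qemu/rcu.h (struct Dev and dev_unplug() are hypothetical):

    struct Dev {
        struct rcu_head rcu;             /* for deferred reclamation */
        QTAILQ_ENTRY(Dev) link;
    };

    static QTAILQ_HEAD(, Dev) devs = QTAILQ_HEAD_INITIALIZER(devs);

    static void dev_reclaim(struct rcu_head *h)
    {
        g_free(container_of(h, struct Dev, rcu));
    }

    /* Writer, updater lock held: unlink now, free after a grace period,
     * without blocking the way synchronize_rcu() would. */
    static void dev_unplug(struct Dev *d)
    {
        QTAILQ_REMOVE_RCU(&devs, d, link);
        call_rcu1(&d->rcu, dev_reclaim);
    }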
 /*
  * RCU singly-linked list
  */

 /* Singly-linked list access methods */
-#define QSLIST_EMPTY_RCU(head) (atomic_read(&(head)->slh_first) == NULL)
-#define QSLIST_FIRST_RCU(head) atomic_rcu_read(&(head)->slh_first)
-#define QSLIST_NEXT_RCU(elm, field) atomic_rcu_read(&(elm)->field.sle_next)
+#define QSLIST_EMPTY_RCU(head) (qatomic_read(&(head)->slh_first) == NULL)
+#define QSLIST_FIRST_RCU(head) qatomic_rcu_read(&(head)->slh_first)
+#define QSLIST_NEXT_RCU(elm, field) qatomic_rcu_read(&(elm)->field.sle_next)

 /* Singly-linked list functions */
 #define QSLIST_INSERT_HEAD_RCU(head, elm, field) do { \
     (elm)->field.sle_next = (head)->slh_first; \
-    atomic_rcu_set(&(head)->slh_first, (elm)); \
+    qatomic_rcu_set(&(head)->slh_first, (elm)); \
 } while (/*CONSTCOND*/0)

 #define QSLIST_INSERT_AFTER_RCU(head, listelm, elm, field) do { \
     (elm)->field.sle_next = (listelm)->field.sle_next; \
-    atomic_rcu_set(&(listelm)->field.sle_next, (elm)); \
+    qatomic_rcu_set(&(listelm)->field.sle_next, (elm)); \
 } while (/*CONSTCOND*/0)

 #define QSLIST_REMOVE_HEAD_RCU(head, field) do { \
-    atomic_set(&(head)->slh_first, (head)->slh_first->field.sle_next); \
+    qatomic_set(&(head)->slh_first, (head)->slh_first->field.sle_next); \
 } while (/*CONSTCOND*/0)

 #define QSLIST_REMOVE_RCU(head, elm, type, field) do { \
     if ((head)->slh_first == (elm)) { \
         QSLIST_REMOVE_HEAD_RCU((head), field); \
     } else { \
         struct type *curr = (head)->slh_first; \
         while (curr->field.sle_next != (elm)) { \
             curr = curr->field.sle_next; \
         } \
-        atomic_set(&curr->field.sle_next, \
+        qatomic_set(&curr->field.sle_next, \
                    curr->field.sle_next->field.sle_next); \
     } \
 } while (/*CONSTCOND*/0)

 #define QSLIST_FOREACH_RCU(var, head, field) \
-    for ((var) = atomic_rcu_read(&(head)->slh_first); \
+    for ((var) = qatomic_rcu_read(&(head)->slh_first); \
         (var); \
-        (var) = atomic_rcu_read(&(var)->field.sle_next))
+        (var) = qatomic_rcu_read(&(var)->field.sle_next))

 #define QSLIST_FOREACH_SAFE_RCU(var, head, field, next) \
-    for ((var) = atomic_rcu_read(&(head)->slh_first); \
-        (var) && ((next) = atomic_rcu_read(&(var)->field.sle_next), 1); \
+    for ((var) = qatomic_rcu_read(&(head)->slh_first); \
+        (var) && ((next) = qatomic_rcu_read(&(var)->field.sle_next), 1); \
         (var) = (next))

 #ifdef __cplusplus
 }
 #endif
 #endif /* QEMU_RCU_QUEUE_H */
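A closing updater-side sketch tying the singly-linked variants together. QSLIST stores no tail pointer and QSLIST_REMOVE_RCU re-walks the list from the head to find the predecessor, so the structure favours head insertion with occasional pruning; the _SAFE iterator latches sle_next first, letting the body unlink the current node. node_expired() is hypothetical, and g_free_rcu() is assumed from qemu/rcu.h (it frees after a grace period via an embedded struct rcu_head):

    struct Node {
        struct rcu_head rcu;
        QSLIST_ENTRY(Node) entry;
        int64_t deadline;
    };

    static QSLIST_HEAD(, Node) nodes = QSLIST_HEAD_INITIALIZER(nodes);

    /* Updater, with its lock held: prune expired nodes in one pass. */
    static void prune_nodes(void)
    {
        struct Node *n, *next;

        QSLIST_FOREACH_SAFE_RCU(n, &nodes, entry, next) {
            if (node_expired(n)) {
                QSLIST_REMOVE_RCU(&nodes, n, Node, entry);  /* O(n) re-walk */
                g_free_rcu(n, rcu);     /* reclaimed after a grace period */
            }
        }
    }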