/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_GENERIC_PERCPU_H_
#define _ASM_GENERIC_PERCPU_H_

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>

#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif
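
/*
 * For illustration only (not compiled; assumes the generic definitions
 * with no arch overrides): CPU "cpu"'s instance of a per-cpu variable
 * lives at the variable's per-cpu address shifted by that CPU's offset,
 * roughly
 *
 *	typeof(var) *p = SHIFT_PERCPU_PTR(&var, per_cpu_offset(cpu));
 *
 * which is what per_cpu_ptr() in linux/percpu-defs.h reduces to.
 */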

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more efficient
 * means of obtaining the offset to the per-cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif

/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr)	SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif
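
/*
 * Sketch of how the pieces above fit together (assumes the generic
 * definitions; an arch override may differ): raw_cpu_ptr(&var) ends up
 * as arch_raw_cpu_ptr(&var), i.e.
 *
 *	SHIFT_PERCPU_PTR(&var, __my_cpu_offset)
 *
 * so it addresses the current CPU's instance with no protection against
 * migration; callers are responsible for staying on one CPU.
 */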

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#endif	/* SMP */

#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#define raw_cpu_generic_read(pcp)					\
({									\
	*raw_cpu_ptr(&(pcp));						\
})

#define raw_cpu_generic_to_op(pcp, val, op)				\
do {									\
	*raw_cpu_ptr(&(pcp)) op val;					\
} while (0)

#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
									\
	*__p += val;							\
	*__p;								\
})

#define raw_cpu_generic_xchg(pcp, nval)					\
({									\
	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
	typeof(pcp) __ret;						\
	__ret = *__p;							\
	*__p = nval;							\
	__ret;								\
})

#define __cpu_fallback_try_cmpxchg(pcp, ovalp, nval, _cmpxchg)		\
({									\
	typeof(pcp) __val, __old = *(ovalp);				\
	__val = _cmpxchg(pcp, __old, nval);				\
	if (__val != __old)						\
		*(ovalp) = __val;					\
	__val == __old;							\
})

#define raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)			\
({									\
	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
	typeof(pcp) __val = *__p, __old = *(ovalp);			\
	bool __ret;							\
	if (__val == __old) {						\
		*__p = nval;						\
		__ret = true;						\
	} else {							\
		*(ovalp) = __val;					\
		__ret = false;						\
	}								\
	__ret;								\
})

#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __old = (oval);					\
	raw_cpu_generic_try_cmpxchg(pcp, &__old, nval);			\
	__old;								\
})
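
/*
 * Usage sketch (illustrative only; "counter" and "batch" are made-up
 * names, and the raw_cpu_try_cmpxchg() wrapper is assumed to come from
 * linux/percpu-defs.h): the try_cmpxchg form suits retry loops, since
 * a failed attempt refreshes the old value through ovalp and spares
 * the caller a re-read:
 *
 *	unsigned long old, new;
 *
 *	old = raw_cpu_read(counter);
 *	do {
 *		new = old + batch;
 *	} while (!raw_cpu_try_cmpxchg(counter, &old, new));
 */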

#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	typeof(pcp1) *__p1 = raw_cpu_ptr(&(pcp1));			\
	typeof(pcp2) *__p2 = raw_cpu_ptr(&(pcp2));			\
	int __ret = 0;							\
	if (*__p1 == (oval1) && *__p2 == (oval2)) {			\
		*__p1 = nval1;						\
		*__p2 = nval2;						\
		__ret = 1;						\
	}								\
	(__ret);							\
})

#define __this_cpu_generic_read_nopreempt(pcp)				\
({									\
	typeof(pcp) ___ret;						\
	preempt_disable_notrace();					\
	___ret = READ_ONCE(*raw_cpu_ptr(&(pcp)));			\
	preempt_enable_notrace();					\
	___ret;								\
})

#define __this_cpu_generic_read_noirq(pcp)				\
({									\
	typeof(pcp) ___ret;						\
	unsigned long ___flags;						\
	raw_local_irq_save(___flags);					\
	___ret = raw_cpu_generic_read(pcp);				\
	raw_local_irq_restore(___flags);				\
	___ret;								\
})

#define this_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) __ret;						\
	if (__native_word(pcp))						\
		__ret = __this_cpu_generic_read_nopreempt(pcp);		\
	else								\
		__ret = __this_cpu_generic_read_noirq(pcp);		\
	__ret;								\
})
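
/*
 * this_cpu_generic_read() splits on the access size: a __native_word()
 * value is loaded with a single instruction, so pinning the task with
 * preempt_disable_notrace() and forcing a single access via READ_ONCE()
 * is enough for a tear-free read of the local instance. A wider value
 * (e.g. a u64 on a 32-bit arch) is loaded piecewise, so interrupts are
 * disabled instead, preventing an interrupt handler from updating the
 * variable between the partial loads.
 */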

#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	raw_cpu_generic_to_op(pcp, val, op);				\
	raw_local_irq_restore(__flags);					\
} while (0)

#define this_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_add_return(pcp, val);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_xchg(pcp, nval);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)			\
({									\
	bool __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
({									\
	int __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
			oval1, oval2, nval1, nval2);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp)		raw_cpu_generic_read(pcp)
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif

#ifndef raw_cpu_try_cmpxchg_1
#ifdef raw_cpu_cmpxchg_1
#define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_1)
#else
#define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg_2
#ifdef raw_cpu_cmpxchg_2
#define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_2)
#else
#define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg_4
#ifdef raw_cpu_cmpxchg_4
#define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_4)
#else
#define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg_8
#ifdef raw_cpu_cmpxchg_8
#define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_8)
#else
#define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg_double_1
#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_2
#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_4
#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_8
#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif

#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp)		this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif

#ifndef this_cpu_try_cmpxchg_1
#ifdef this_cpu_cmpxchg_1
#define this_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_1)
#else
#define this_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg_2
#ifdef this_cpu_cmpxchg_2
#define this_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_2)
#else
#define this_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg_4
#ifdef this_cpu_cmpxchg_4
#define this_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_4)
#else
#define this_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg_8
#ifdef this_cpu_cmpxchg_8
#define this_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_8)
#else
#define this_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif

#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg_double_1
#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_2
#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_4
#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_8
#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif

#endif /* _ASM_GENERIC_PERCPU_H_ */