atomic-long.h
// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-long.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_LONG_H
#define _LINUX_ATOMIC_LONG_H

#include <linux/compiler.h>
#include <asm/types.h>

#ifdef CONFIG_64BIT
typedef atomic64_t atomic_long_t;
#define ATOMIC_LONG_INIT(i)		ATOMIC64_INIT(i)
#define atomic_long_cond_read_acquire	atomic64_cond_read_acquire
#define atomic_long_cond_read_relaxed	atomic64_cond_read_relaxed
#else
typedef atomic_t atomic_long_t;
#define ATOMIC_LONG_INIT(i)		ATOMIC_INIT(i)
#define atomic_long_cond_read_acquire	atomic_cond_read_acquire
#define atomic_long_cond_read_relaxed	atomic_cond_read_relaxed
#endif

#ifdef CONFIG_64BIT

static __always_inline long
arch_atomic_long_read(const atomic_long_t *v)
{
	return arch_atomic64_read(v);
}

static __always_inline long
arch_atomic_long_read_acquire(const atomic_long_t *v)
{
	return arch_atomic64_read_acquire(v);
}

static __always_inline void
arch_atomic_long_set(atomic_long_t *v, long i)
{
	arch_atomic64_set(v, i);
}

static __always_inline void
arch_atomic_long_set_release(atomic_long_t *v, long i)
{
	arch_atomic64_set_release(v, i);
}

static __always_inline void
arch_atomic_long_add(long i, atomic_long_t *v)
{
	arch_atomic64_add(i, v);
}

static __always_inline long
arch_atomic_long_add_return(long i, atomic_long_t *v)
{
	return arch_atomic64_add_return(i, v);
}

static __always_inline long
arch_atomic_long_add_return_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_add_return_acquire(i, v);
}

static __always_inline long
arch_atomic_long_add_return_release(long i, atomic_long_t *v)
{
	return arch_atomic64_add_return_release(i, v);
}

static __always_inline long
arch_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_add_return_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_add(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_add_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_release(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_add_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_add_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_sub(long i, atomic_long_t *v)
{
	arch_atomic64_sub(i, v);
}

static __always_inline long
arch_atomic_long_sub_return(long i, atomic_long_t *v)
{
	return arch_atomic64_sub_return(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_sub_return_acquire(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_release(long i, atomic_long_t *v)
{
	return arch_atomic64_sub_return_release(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_sub_return_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_sub(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_sub_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_sub_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_sub_relaxed(i, v);
}
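/*
 * Illustrative sketch, not part of the generated file: the wrappers follow
 * the kernel-wide return-value convention. A plain op (e.g. add) returns
 * void, the *_return variants return the value *after* the update, and the
 * fetch_* variants return the value *before* it. The example function name
 * below is hypothetical.
 */
#if 0
static void example_return_conventions(atomic_long_t *counter)
{
	long after, before;

	arch_atomic_long_add(2, counter);		  /* no return value */
	after  = arch_atomic_long_add_return(2, counter); /* value after the add */
	before = arch_atomic_long_fetch_add(2, counter);  /* value before the add */
}
#endif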
static __always_inline void
arch_atomic_long_inc(atomic_long_t *v)
{
	arch_atomic64_inc(v);
}

static __always_inline long
arch_atomic_long_inc_return(atomic_long_t *v)
{
	return arch_atomic64_inc_return(v);
}

static __always_inline long
arch_atomic_long_inc_return_acquire(atomic_long_t *v)
{
	return arch_atomic64_inc_return_acquire(v);
}

static __always_inline long
arch_atomic_long_inc_return_release(atomic_long_t *v)
{
	return arch_atomic64_inc_return_release(v);
}

static __always_inline long
arch_atomic_long_inc_return_relaxed(atomic_long_t *v)
{
	return arch_atomic64_inc_return_relaxed(v);
}

static __always_inline long
arch_atomic_long_fetch_inc(atomic_long_t *v)
{
	return arch_atomic64_fetch_inc(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_acquire(atomic_long_t *v)
{
	return arch_atomic64_fetch_inc_acquire(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_release(atomic_long_t *v)
{
	return arch_atomic64_fetch_inc_release(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
{
	return arch_atomic64_fetch_inc_relaxed(v);
}

static __always_inline void
arch_atomic_long_dec(atomic_long_t *v)
{
	arch_atomic64_dec(v);
}

static __always_inline long
arch_atomic_long_dec_return(atomic_long_t *v)
{
	return arch_atomic64_dec_return(v);
}

static __always_inline long
arch_atomic_long_dec_return_acquire(atomic_long_t *v)
{
	return arch_atomic64_dec_return_acquire(v);
}

static __always_inline long
arch_atomic_long_dec_return_release(atomic_long_t *v)
{
	return arch_atomic64_dec_return_release(v);
}

static __always_inline long
arch_atomic_long_dec_return_relaxed(atomic_long_t *v)
{
	return arch_atomic64_dec_return_relaxed(v);
}

static __always_inline long
arch_atomic_long_fetch_dec(atomic_long_t *v)
{
	return arch_atomic64_fetch_dec(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_acquire(atomic_long_t *v)
{
	return arch_atomic64_fetch_dec_acquire(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_release(atomic_long_t *v)
{
	return arch_atomic64_fetch_dec_release(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_relaxed(atomic_long_t *v)
{
	return arch_atomic64_fetch_dec_relaxed(v);
}

static __always_inline void
arch_atomic_long_and(long i, atomic_long_t *v)
{
	arch_atomic64_and(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_and(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_and_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_release(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_and_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_and_relaxed(i, v);
}
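/*
 * Illustrative sketch, not part of the generated file: every RMW op above
 * comes in four orderings. The unsuffixed form is fully ordered, _acquire
 * orders it before later memory accesses, _release orders it after earlier
 * ones, and _relaxed guarantees atomicity only. A hypothetical publish/
 * consume pairing using the set_release/read_acquire ops defined above:
 */
#if 0
static void example_publish(long *data, atomic_long_t *seq)
{
	*data = 42;				/* plain store */
	arch_atomic_long_set_release(seq, 1);	/* orders the store before the flag */
}

static long example_consume(long *data, atomic_long_t *seq)
{
	while (!arch_atomic_long_read_acquire(seq))
		;				/* spin until published */
	return *data;				/* safe: acquire pairs with release */
}
#endif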
static __always_inline void
arch_atomic_long_andnot(long i, atomic_long_t *v)
{
	arch_atomic64_andnot(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_andnot(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_andnot_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_andnot_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_andnot_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_or(long i, atomic_long_t *v)
{
	arch_atomic64_or(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_or(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_or_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_release(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_or_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_or_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_xor(long i, atomic_long_t *v)
{
	arch_atomic64_xor(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_xor(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_xor_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_release(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_xor_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_xor_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_xchg(atomic_long_t *v, long i)
{
	return arch_atomic64_xchg(v, i);
}

static __always_inline long
arch_atomic_long_xchg_acquire(atomic_long_t *v, long i)
{
	return arch_atomic64_xchg_acquire(v, i);
}

static __always_inline long
arch_atomic_long_xchg_release(atomic_long_t *v, long i)
{
	return arch_atomic64_xchg_release(v, i);
}

static __always_inline long
arch_atomic_long_xchg_relaxed(atomic_long_t *v, long i)
{
	return arch_atomic64_xchg_relaxed(v, i);
}

static __always_inline long
arch_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
	return arch_atomic64_cmpxchg(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
	return arch_atomic64_cmpxchg_acquire(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
{
	return arch_atomic64_cmpxchg_release(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
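/*
 * Illustrative sketch, not part of the generated file: cmpxchg() returns the
 * value it observed, so a CAS loop must compare that against its expected
 * value itself. The try_cmpxchg() variants just below instead return a bool
 * and write the observed value back through @old on failure, which is why
 * they take a long * and cast it to the underlying s64 * (int * on 32-bit).
 * A hypothetical saturating increment, written both ways:
 */
#if 0
static void example_sat_inc_cmpxchg(atomic_long_t *v, long max)
{
	long old = arch_atomic_long_read(v);

	while (old < max) {
		long seen = arch_atomic_long_cmpxchg(v, old, old + 1);

		if (seen == old)
			break;			/* CAS succeeded */
		old = seen;			/* retry with the value we saw */
	}
}

static void example_sat_inc_try_cmpxchg(atomic_long_t *v, long max)
{
	long old = arch_atomic_long_read(v);

	while (old < max && !arch_atomic_long_try_cmpxchg(v, &old, old + 1))
		;				/* @old was reloaded on failure */
}
#endif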
static __always_inline bool
arch_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
	return arch_atomic64_try_cmpxchg(v, (s64 *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
	return arch_atomic64_try_cmpxchg_acquire(v, (s64 *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
	return arch_atomic64_try_cmpxchg_release(v, (s64 *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
	return arch_atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new);
}

static __always_inline bool
arch_atomic_long_sub_and_test(long i, atomic_long_t *v)
{
	return arch_atomic64_sub_and_test(i, v);
}

static __always_inline bool
arch_atomic_long_dec_and_test(atomic_long_t *v)
{
	return arch_atomic64_dec_and_test(v);
}

static __always_inline bool
arch_atomic_long_inc_and_test(atomic_long_t *v)
{
	return arch_atomic64_inc_and_test(v);
}

static __always_inline bool
arch_atomic_long_add_negative(long i, atomic_long_t *v)
{
	return arch_atomic64_add_negative(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
	return arch_atomic64_fetch_add_unless(v, a, u);
}

static __always_inline bool
arch_atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
	return arch_atomic64_add_unless(v, a, u);
}

static __always_inline bool
arch_atomic_long_inc_not_zero(atomic_long_t *v)
{
	return arch_atomic64_inc_not_zero(v);
}

static __always_inline bool
arch_atomic_long_inc_unless_negative(atomic_long_t *v)
{
	return arch_atomic64_inc_unless_negative(v);
}

static __always_inline bool
arch_atomic_long_dec_unless_positive(atomic_long_t *v)
{
	return arch_atomic64_dec_unless_positive(v);
}

static __always_inline long
arch_atomic_long_dec_if_positive(atomic_long_t *v)
{
	return arch_atomic64_dec_if_positive(v);
}

#else /* CONFIG_64BIT */
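/*
 * On 32-bit kernels long is 32 bits wide, so atomic_long_t aliases atomic_t
 * and the wrappers below delegate to the arch_atomic_*() ops rather than
 * arch_atomic64_*(). Apart from that, this branch mirrors the one above.
 */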
static __always_inline long
arch_atomic_long_read(const atomic_long_t *v)
{
	return arch_atomic_read(v);
}

static __always_inline long
arch_atomic_long_read_acquire(const atomic_long_t *v)
{
	return arch_atomic_read_acquire(v);
}

static __always_inline void
arch_atomic_long_set(atomic_long_t *v, long i)
{
	arch_atomic_set(v, i);
}

static __always_inline void
arch_atomic_long_set_release(atomic_long_t *v, long i)
{
	arch_atomic_set_release(v, i);
}

static __always_inline void
arch_atomic_long_add(long i, atomic_long_t *v)
{
	arch_atomic_add(i, v);
}

static __always_inline long
arch_atomic_long_add_return(long i, atomic_long_t *v)
{
	return arch_atomic_add_return(i, v);
}

static __always_inline long
arch_atomic_long_add_return_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_add_return_acquire(i, v);
}

static __always_inline long
arch_atomic_long_add_return_release(long i, atomic_long_t *v)
{
	return arch_atomic_add_return_release(i, v);
}

static __always_inline long
arch_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_add_return_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_add(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_add_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_release(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_add_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_add_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_sub(long i, atomic_long_t *v)
{
	arch_atomic_sub(i, v);
}

static __always_inline long
arch_atomic_long_sub_return(long i, atomic_long_t *v)
{
	return arch_atomic_sub_return(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_sub_return_acquire(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_release(long i, atomic_long_t *v)
{
	return arch_atomic_sub_return_release(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_sub_return_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_sub(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_sub_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_sub_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_sub_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_inc(atomic_long_t *v)
{
	arch_atomic_inc(v);
}

static __always_inline long
arch_atomic_long_inc_return(atomic_long_t *v)
{
	return arch_atomic_inc_return(v);
}

static __always_inline long
arch_atomic_long_inc_return_acquire(atomic_long_t *v)
{
	return arch_atomic_inc_return_acquire(v);
}

static __always_inline long
arch_atomic_long_inc_return_release(atomic_long_t *v)
{
	return arch_atomic_inc_return_release(v);
}

static __always_inline long
arch_atomic_long_inc_return_relaxed(atomic_long_t *v)
{
	return arch_atomic_inc_return_relaxed(v);
}

static __always_inline long
arch_atomic_long_fetch_inc(atomic_long_t *v)
{
	return arch_atomic_fetch_inc(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_acquire(atomic_long_t *v)
{
	return arch_atomic_fetch_inc_acquire(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_release(atomic_long_t *v)
{
	return arch_atomic_fetch_inc_release(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
{
	return arch_atomic_fetch_inc_relaxed(v);
}

static __always_inline void
arch_atomic_long_dec(atomic_long_t *v)
{
	arch_atomic_dec(v);
}
static __always_inline long
arch_atomic_long_dec_return(atomic_long_t *v)
{
	return arch_atomic_dec_return(v);
}

static __always_inline long
arch_atomic_long_dec_return_acquire(atomic_long_t *v)
{
	return arch_atomic_dec_return_acquire(v);
}

static __always_inline long
arch_atomic_long_dec_return_release(atomic_long_t *v)
{
	return arch_atomic_dec_return_release(v);
}

static __always_inline long
arch_atomic_long_dec_return_relaxed(atomic_long_t *v)
{
	return arch_atomic_dec_return_relaxed(v);
}

static __always_inline long
arch_atomic_long_fetch_dec(atomic_long_t *v)
{
	return arch_atomic_fetch_dec(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_acquire(atomic_long_t *v)
{
	return arch_atomic_fetch_dec_acquire(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_release(atomic_long_t *v)
{
	return arch_atomic_fetch_dec_release(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_relaxed(atomic_long_t *v)
{
	return arch_atomic_fetch_dec_relaxed(v);
}

static __always_inline void
arch_atomic_long_and(long i, atomic_long_t *v)
{
	arch_atomic_and(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_and(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_and_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_release(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_and_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_and_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_andnot(long i, atomic_long_t *v)
{
	arch_atomic_andnot(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_andnot(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_andnot_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_andnot_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_andnot_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_or(long i, atomic_long_t *v)
{
	arch_atomic_or(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_or(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_or_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_release(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_or_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_or_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_xor(long i, atomic_long_t *v)
{
	arch_atomic_xor(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_xor(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_xor_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_release(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_xor_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_xor_relaxed(i, v);
}
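/*
 * Illustrative sketch, not part of the generated file: the xchg() wrappers
 * just below swap in a new value and return the old one unconditionally,
 * which suits "steal the whole word" patterns. A hypothetical example that
 * drains a set of pending bits in one atomic step:
 */
#if 0
static long example_drain_pending(atomic_long_t *pending)
{
	/* Atomically fetch all pending bits and reset the word to zero. */
	return arch_atomic_long_xchg(pending, 0);
}
#endif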
static __always_inline long
arch_atomic_long_xchg(atomic_long_t *v, long i)
{
	return arch_atomic_xchg(v, i);
}

static __always_inline long
arch_atomic_long_xchg_acquire(atomic_long_t *v, long i)
{
	return arch_atomic_xchg_acquire(v, i);
}

static __always_inline long
arch_atomic_long_xchg_release(atomic_long_t *v, long i)
{
	return arch_atomic_xchg_release(v, i);
}

static __always_inline long
arch_atomic_long_xchg_relaxed(atomic_long_t *v, long i)
{
	return arch_atomic_xchg_relaxed(v, i);
}

static __always_inline long
arch_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
	return arch_atomic_cmpxchg(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
	return arch_atomic_cmpxchg_acquire(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
{
	return arch_atomic_cmpxchg_release(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
	return arch_atomic_cmpxchg_relaxed(v, old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
	return arch_atomic_try_cmpxchg(v, (int *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
	return arch_atomic_try_cmpxchg_acquire(v, (int *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
	return arch_atomic_try_cmpxchg_release(v, (int *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
	return arch_atomic_try_cmpxchg_relaxed(v, (int *)old, new);
}

static __always_inline bool
arch_atomic_long_sub_and_test(long i, atomic_long_t *v)
{
	return arch_atomic_sub_and_test(i, v);
}

static __always_inline bool
arch_atomic_long_dec_and_test(atomic_long_t *v)
{
	return arch_atomic_dec_and_test(v);
}

static __always_inline bool
arch_atomic_long_inc_and_test(atomic_long_t *v)
{
	return arch_atomic_inc_and_test(v);
}

static __always_inline bool
arch_atomic_long_add_negative(long i, atomic_long_t *v)
{
	return arch_atomic_add_negative(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
	return arch_atomic_fetch_add_unless(v, a, u);
}

static __always_inline bool
arch_atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
	return arch_atomic_add_unless(v, a, u);
}

static __always_inline bool
arch_atomic_long_inc_not_zero(atomic_long_t *v)
{
	return arch_atomic_inc_not_zero(v);
}
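/*
 * Illustrative sketch, not part of the generated file: the conditional ops
 * only perform the update when a predicate holds and report whether they
 * did. inc_not_zero() is the classic lookup-side refcount idiom, since it
 * refuses to resurrect an object whose count already hit zero; dec_and_test()
 * is its put-side counterpart. The struct and functions are hypothetical:
 */
#if 0
struct example_obj {
	atomic_long_t refs;
};

static bool example_obj_tryget(struct example_obj *obj)
{
	/* Take a reference only if at least one is still held. */
	return arch_atomic_long_inc_not_zero(&obj->refs);
}

static void example_obj_put(struct example_obj *obj)
{
	if (arch_atomic_long_dec_and_test(&obj->refs))
		;	/* last reference dropped: free the object here */
}
#endif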
static __always_inline bool
arch_atomic_long_inc_unless_negative(atomic_long_t *v)
{
	return arch_atomic_inc_unless_negative(v);
}

static __always_inline bool
arch_atomic_long_dec_unless_positive(atomic_long_t *v)
{
	return arch_atomic_dec_unless_positive(v);
}

static __always_inline long
arch_atomic_long_dec_if_positive(atomic_long_t *v)
{
	return arch_atomic_dec_if_positive(v);
}

#endif /* CONFIG_64BIT */
#endif /* _LINUX_ATOMIC_LONG_H */
// e8f0e08ff072b74d180eabe2ad001282b38c2c88