cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

atomic64_32.h (8367B)


/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC64_32_H
#define _ASM_X86_ATOMIC64_32_H

#include <linux/compiler.h>
#include <linux/types.h>
//#include <asm/cmpxchg.h>

/* A 64-bit atomic type */

typedef struct {
	s64 __aligned(8) counter;
} atomic64_t;

#define ATOMIC64_INIT(val)	{ (val) }

#define __ATOMIC64_DECL(sym) void atomic64_##sym(atomic64_t *, ...)
#ifndef ATOMIC64_EXPORT
#define ATOMIC64_DECL_ONE __ATOMIC64_DECL
#else
#define ATOMIC64_DECL_ONE(sym) __ATOMIC64_DECL(sym); \
	ATOMIC64_EXPORT(atomic64_##sym)
#endif

#ifdef CONFIG_X86_CMPXCHG64
#define __alternative_atomic64(f, g, out, in...) \
	asm volatile("call %P[func]" \
		     : out : [func] "i" (atomic64_##g##_cx8), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8)
#else
#define __alternative_atomic64(f, g, out, in...) \
	alternative_call(atomic64_##f##_386, atomic64_##g##_cx8, \
			 X86_FEATURE_CX8, ASM_OUTPUT2(out), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8); \
	ATOMIC64_DECL_ONE(sym##_386)

ATOMIC64_DECL_ONE(add_386);
ATOMIC64_DECL_ONE(sub_386);
ATOMIC64_DECL_ONE(inc_386);
ATOMIC64_DECL_ONE(dec_386);
#endif
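
/*
 * Illustrative note (not part of the original header): on a
 * CONFIG_X86_CMPXCHG64 kernel, ATOMIC64_DECL(read) below expands to
 * roughly
 *
 *	void atomic64_read_cx8(atomic64_t *, ...);
 *
 * The variadic prototype is a placeholder: the out-of-line helpers use a
 * custom register-based calling convention, so they must only ever be
 * reached through the asm wrappers in this file, never called as
 * ordinary C functions.
 */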

#define alternative_atomic64(f, out, in...) \
	__alternative_atomic64(f, f, ASM_OUTPUT2(out), ## in)
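
/*
 * Illustrative expansion (not part of the original header): with
 * CONFIG_X86_CMPXCHG64, alternative_atomic64(read, "=&A" (r), "c" (v)
 * : "memory") becomes approximately
 *
 *	asm volatile("call %P[func]"
 *		     : "=&A" (r)
 *		     : [func] "i" (atomic64_read_cx8), "c" (v)
 *		     : "memory");
 *
 * Without CONFIG_X86_CMPXCHG64, alternative_call() instead patches in
 * either the _386 or the _cx8 helper at boot, keyed on the
 * X86_FEATURE_CX8 CPUID bit.
 */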

ATOMIC64_DECL(read);
ATOMIC64_DECL(set);
ATOMIC64_DECL(xchg);
ATOMIC64_DECL(add_return);
ATOMIC64_DECL(sub_return);
ATOMIC64_DECL(inc_return);
ATOMIC64_DECL(dec_return);
ATOMIC64_DECL(dec_if_positive);
ATOMIC64_DECL(inc_not_zero);
ATOMIC64_DECL(add_unless);

#undef ATOMIC64_DECL
#undef ATOMIC64_DECL_ONE
#undef __ATOMIC64_DECL
#undef ATOMIC64_EXPORT

/**
 * arch_atomic64_cmpxchg - cmpxchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @o: expected value
 * @n: new value
 *
 * Atomically sets @v to @n if it was equal to @o and returns
 * the old value.
 */

static inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 o, s64 n)
{
	return arch_cmpxchg64(&v->counter, o, n);
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
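
/*
 * Usage sketch (illustrative, not part of the original header): the
 * classic compare-and-swap retry loop built on arch_atomic64_cmpxchg().
 * The helper name example_saturating_inc is hypothetical.
 *
 *	static inline s64 example_saturating_inc(atomic64_t *v)
 *	{
 *		s64 old = arch_atomic64_read(v);
 *		s64 new, seen;
 *
 *		for (;;) {
 *			new = (old == S64_MAX) ? old : old + 1;
 *			seen = arch_atomic64_cmpxchg(v, old, new);
 *			if (seen == old)	// nobody raced us, done
 *				return new;
 *			old = seen;		// lost the race, retry
 *		}
 *	}
 */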

/**
 * arch_atomic64_xchg - xchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @n: value to assign
 *
 * Atomically xchgs the value of @v to @n and returns
 * the old value.
 */
static inline s64 arch_atomic64_xchg(atomic64_t *v, s64 n)
{
	s64 o;
	unsigned high = (unsigned)(n >> 32);
	unsigned low = (unsigned)n;
	alternative_atomic64(xchg, "=&A" (o),
			     "S" (v), "b" (low), "c" (high)
			     : "memory");
	return o;
}
#define arch_atomic64_xchg arch_atomic64_xchg
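
/*
 * Note (added for clarity): on 32-bit x86 the "A" constraint is the
 * edx:eax register pair, which is how a 64-bit value is passed or
 * returned here. The new value is split into ebx (low) and ecx (high)
 * because the cmpxchg8b instruction underlying the _cx8 helpers compares
 * memory against edx:eax and, on success, stores ecx:ebx.
 */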

/**
 * arch_atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: value to assign
 *
 * Atomically sets the value of @v to @i.
 */
static inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
	unsigned high = (unsigned)(i >> 32);
	unsigned low = (unsigned)i;
	alternative_atomic64(set, /* no output */,
			     "S" (v), "b" (low), "c" (high)
			     : "eax", "edx", "memory");
}
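
/*
 * Note (added for clarity): set has no C-visible output, but the CX8
 * helper is built around cmpxchg8b, which loads the current value into
 * edx:eax whenever the compare fails, hence the "eax"/"edx" clobbers
 * above.
 */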

/**
 * arch_atomic64_read - read atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically reads the value of @v and returns it.
 */
static inline s64 arch_atomic64_read(const atomic64_t *v)
{
	s64 r;
	alternative_atomic64(read, "=&A" (r), "c" (v) : "memory");
	return r;
}

/**
 * arch_atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + *@v
 */
static inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	alternative_atomic64(add_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
#define arch_atomic64_add_return arch_atomic64_add_return
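
/*
 * Note (added for clarity): "+A" (i) marks i as both input and output in
 * edx:eax, so the out-of-line helper receives the addend and leaves the
 * updated counter value in the same register pair; "+c" (v) passes the
 * pointer in ecx and lets the helper clobber it.
 */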

/*
 * Other variants with different arithmetic operators:
 */
static inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	alternative_atomic64(sub_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
#define arch_atomic64_sub_return arch_atomic64_sub_return

static inline s64 arch_atomic64_inc_return(atomic64_t *v)
{
	s64 a;
	alternative_atomic64(inc_return, "=&A" (a),
			     "S" (v) : "memory", "ecx");
	return a;
}
#define arch_atomic64_inc_return arch_atomic64_inc_return

static inline s64 arch_atomic64_dec_return(atomic64_t *v)
{
	s64 a;
	alternative_atomic64(dec_return, "=&A" (a),
			     "S" (v) : "memory", "ecx");
	return a;
}
#define arch_atomic64_dec_return arch_atomic64_dec_return

/**
 * arch_atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline s64 arch_atomic64_add(s64 i, atomic64_t *v)
{
	__alternative_atomic64(add, add_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}

/**
 * arch_atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline s64 arch_atomic64_sub(s64 i, atomic64_t *v)
{
	__alternative_atomic64(sub, sub_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}

/**
 * arch_atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void arch_atomic64_inc(atomic64_t *v)
{
	__alternative_atomic64(inc, inc_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}
#define arch_atomic64_inc arch_atomic64_inc

/**
 * arch_atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void arch_atomic64_dec(atomic64_t *v)
{
	__alternative_atomic64(dec, dec_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}
#define arch_atomic64_dec arch_atomic64_dec

/**
 * arch_atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add was done, zero otherwise.
 */
static inline int arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	unsigned low = (unsigned)u;
	unsigned high = (unsigned)(u >> 32);
	alternative_atomic64(add_unless,
			     ASM_OUTPUT2("+A" (a), "+c" (low), "+D" (high)),
			     "S" (v) : "memory");
	return (int)a;
}
#define arch_atomic64_add_unless arch_atomic64_add_unless
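
/*
 * Usage sketch (illustrative, not part of the original header): a
 * hypothetical "take a reference unless the object is already dead"
 * helper; this particular case is what arch_atomic64_inc_not_zero()
 * below provides directly.
 *
 *	static inline bool example_tryget(atomic64_t *refs)
 *	{
 *		return arch_atomic64_add_unless(refs, 1, 0) != 0;
 *	}
 */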

static inline int arch_atomic64_inc_not_zero(atomic64_t *v)
{
	int r;
	alternative_atomic64(inc_not_zero, "=&a" (r),
			     "S" (v) : "ecx", "edx", "memory");
	return r;
}
#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero

static inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
{
	s64 r;
	alternative_atomic64(dec_if_positive, "=&A" (r),
			     "S" (v) : "ecx", "memory");
	return r;
}
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive

#undef alternative_atomic64
#undef __alternative_atomic64
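
/*
 * Note (added for clarity): the bitwise and fetch_* operations below
 * have no dedicated out-of-line helper; they are built from a cmpxchg
 * retry loop instead. Starting with c = 0 is deliberate: the first
 * cmpxchg almost always fails, but failure returns the current counter
 * value, which seeds the loop, so the initial pass doubles as the
 * atomic read.
 */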

static inline void arch_atomic64_and(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c & i)) != c)
		c = old;
}

static inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c & i)) != c)
		c = old;

	return old;
}
#define arch_atomic64_fetch_and arch_atomic64_fetch_and

static inline void arch_atomic64_or(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c | i)) != c)
		c = old;
}

static inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c | i)) != c)
		c = old;

	return old;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or

static inline void arch_atomic64_xor(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c ^ i)) != c)
		c = old;
}

static inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c ^ i)) != c)
		c = old;

	return old;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor

static inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c + i)) != c)
		c = old;

	return old;
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add

#define arch_atomic64_fetch_sub(i, v)	arch_atomic64_fetch_add(-(i), (v))
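
/*
 * Usage sketch (illustrative, not part of the original header):
 * fetch-style operations return the value the counter held *before* the
 * update, i.e. post-increment semantics:
 *
 *	atomic64_t v = ATOMIC64_INIT(41);
 *	s64 prev = arch_atomic64_fetch_add(1, &v);	// prev == 41, v == 42
 */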

#endif /* _ASM_X86_ATOMIC64_32_H */