cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

local.h (4629B)


/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ARCH_MIPS_LOCAL_H
#define _ARCH_MIPS_LOCAL_H

#include <linux/percpu.h>
#include <linux/bitops.h>
#include <linux/atomic.h>
#include <asm/asm.h>
#include <asm/cmpxchg.h>
#include <asm/compiler.h>

typedef struct
{
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l, i) atomic_long_set(&(l)->a, (i))

#define local_add(i, l) atomic_long_add((i), (&(l)->a))
#define local_sub(i, l) atomic_long_sub((i), (&(l)->a))
#define local_inc(l)	atomic_long_inc(&(l)->a)
#define local_dec(l)	atomic_long_dec(&(l)->a)
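
/*
 * Illustrative usage (editorial sketch, not part of the upstream
 * header; the variable name nr_events is hypothetical): a per-CPU
 * event counter that is cheap to update and atomic with respect to
 * interrupts on the local CPU.
 *
 *	static DEFINE_PER_CPU(local_t, nr_events) = LOCAL_INIT(0);
 *
 *	static void note_event(void)
 *	{
 *		local_inc(get_cpu_ptr(&nr_events));
 *		put_cpu_ptr(&nr_events);
 *	}
 */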

/*
 * Same as above, but return the result value
 */
static __inline__ long local_add_return(long i, local_t * l)
{
	unsigned long result;

	if (kernel_uses_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
			__SYNC(full, loongson3_war) "                   \n"
		"1:"	__stringify(LONG_LL)	"	%1, %2		\n"
			__stringify(LONG_ADDU)	"	%0, %1, %3	\n"
			__stringify(LONG_SC)	"	%0, %2		\n"
			__stringify(SC_BEQZ)	"	%0, 1b		\n"
			__stringify(LONG_ADDU)	"	%0, %1, %3	\n"
		"	.set	pop					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result += i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}
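
/*
 * Note on the LL/SC sequence above (editorial, not upstream): SC reuses
 * %0 as its success flag, so once SC_BEQZ falls through, the final ADDU
 * recomputes the updated value from the loaded value still held in %1.
 */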

static __inline__ long local_sub_return(long i, local_t * l)
{
	unsigned long result;

	if (kernel_uses_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
			__SYNC(full, loongson3_war) "                   \n"
		"1:"	__stringify(LONG_LL)	"	%1, %2		\n"
			__stringify(LONG_SUBU)	"	%0, %1, %3	\n"
			__stringify(LONG_SC)	"	%0, %2		\n"
			__stringify(SC_BEQZ)	"	%0, 1b		\n"
			__stringify(LONG_SUBU)	"	%0, %1, %3	\n"
		"	.set	pop					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result -= i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}

#define local_cmpxchg(l, o, n) \
	((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))
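
/*
 * Illustrative use of local_cmpxchg() (editorial sketch, not part of
 * the upstream header; the names once and do_setup_once() are
 * hypothetical): claim a one-shot per-CPU flag, where only the winner
 * sees the old value 0.
 *
 *	static DEFINE_PER_CPU(local_t, once) = LOCAL_INIT(0);
 *
 *	if (local_cmpxchg(get_cpu_ptr(&once), 0, 1) == 0)
 *		do_setup_once();
 *	put_cpu_ptr(&once);
 */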

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)				\
({								\
	long c, old;						\
	c = local_read(l);					\
	while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
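
/*
 * Illustrative use of local_inc_not_zero() (editorial sketch, not part
 * of the upstream header; obj->refs is hypothetical): take a reference
 * only if the count has not already dropped to zero.
 *
 *	if (!local_inc_not_zero(&obj->refs))
 *		return -ENOENT;
 */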

#define local_dec_return(l) local_sub_return(1, (l))
#define local_inc_return(l) local_add_return(1, (l))

/*
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i, l) (local_sub_return((i), (l)) == 0)

/*
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/*
 * local_dec_and_test - decrement by 1 and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)
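
/*
 * Illustrative use of local_dec_and_test() (editorial sketch, not part
 * of the upstream header; obj and free_obj() are hypothetical): drop a
 * reference and free the object once the last reference is gone.
 *
 *	if (local_dec_and_test(&obj->refs))
 *		free_obj(obj);
 */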

/*
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define local_add_negative(i, l) (local_add_return(i, (l)) < 0)

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note they take
 * a variable, not an address.
 */

#define __local_inc(l)		((l)->a.counter++)
#define __local_dec(l)		((l)->a.counter--)
#define __local_add(i, l)	((l)->a.counter+=(i))
#define __local_sub(i, l)	((l)->a.counter-=(i))
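
/*
 * Illustrative use of the non-atomic __local_* variants (editorial
 * sketch, not part of the upstream header; bytes_seen is hypothetical).
 * These are plain read-modify-write operations, so they are only safe
 * when no other context, including interrupts, can touch the counter:
 *
 *	preempt_disable();
 *	__local_add(n, this_cpu_ptr(&bytes_seen));
 *	preempt_enable();
 */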

#endif /* _ARCH_MIPS_LOCAL_H */