cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

percpu.h (6628B)


/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ARCH_S390_PERCPU__
#define __ARCH_S390_PERCPU__

#include <linux/preempt.h>
#include <asm/cmpxchg.h>

/*
 * s390 uses its own implementation for per cpu data: the offset of
 * the cpu-local data area is cached in the cpu's lowcore memory.
 */
#define __my_cpu_offset S390_lowcore.percpu_offset

/*
 * For 64 bit module code, the module may be more than 4G above the
 * per cpu area, so use weak definitions to force the compiler to
 * generate external references.
 */
#if defined(MODULE)
#define ARCH_NEEDS_WEAK_PER_CPU
#endif

/*
 * We use a compare-and-swap loop since that uses fewer cpu cycles than
 * disabling and enabling interrupts like the generic variant would do.
 */
#define arch_this_cpu_to_op_simple(pcp, val, op)			\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ old__, new__, prev__;				\
	pcp_op_T__ *ptr__;						\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	prev__ = *ptr__;						\
	do {								\
		old__ = prev__;						\
		new__ = old__ op (val);					\
		prev__ = cmpxchg(ptr__, old__, new__);			\
	} while (prev__ != old__);					\
	preempt_enable_notrace();					\
	new__;								\
})
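
/*
 * Illustrative usage (sample_count is a hypothetical per-cpu variable, not
 * defined in this header): a byte-sized update dispatches to the CAS-based
 * fallback above via the generic this_cpu_add(); the loop retries until
 * cmpxchg() observes the value it previously read.
 *
 *	DEFINE_PER_CPU(u8, sample_count);
 *	...
 *	this_cpu_add(sample_count, 1);	// sizeof == 1 -> this_cpu_add_1()
 */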

#define this_cpu_add_1(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_2(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_2(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_and_1(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_and_2(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_or_1(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_2(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)

#ifndef CONFIG_HAVE_MARCH_Z196_FEATURES

#define this_cpu_add_4(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_8(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_4(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_8(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_and_4(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_and_8(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_or_4(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_8(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)

#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */

#define arch_this_cpu_add(pcp, val, op1, op2, szcast)			\
{									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	if (__builtin_constant_p(val__) &&				\
	    ((szcast)val__ > -129) && ((szcast)val__ < 128)) {		\
		asm volatile(						\
			op2 "   %[ptr__],%[val__]\n"			\
			: [ptr__] "+Q" (*ptr__)				\
			: [val__] "i" ((szcast)val__)			\
			: "cc");					\
	} else {							\
		asm volatile(						\
			op1 "   %[old__],%[val__],%[ptr__]\n"		\
			: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)	\
			: [val__] "d" (val__)				\
			: "cc");					\
	}								\
	preempt_enable_notrace();					\
}
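
/*
 * Illustration of how the two paths above are chosen (counter and n are
 * hypothetical, not defined in this header): a compile-time constant in
 * the signed 8-bit range uses the add-immediate-to-storage form (op2,
 * "asi"/"agsi"), anything else the load-and-add form (op1, "laa"/"laag").
 *
 *	DEFINE_PER_CPU(int, counter);
 *	...
 *	this_cpu_add(counter, 1);	// constant in -128..127 -> "asi"
 *	this_cpu_add(counter, n);	// variable addend -> "laa"
 */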

#define this_cpu_add_4(pcp, val) arch_this_cpu_add(pcp, val, "laa", "asi", int)
#define this_cpu_add_8(pcp, val) arch_this_cpu_add(pcp, val, "laag", "agsi", long)

#define arch_this_cpu_add_return(pcp, val, op)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	asm volatile(							\
		op "    %[old__],%[val__],%[ptr__]\n"			\
		: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)		\
		: [val__] "d" (val__)					\
		: "cc");						\
	preempt_enable_notrace();					\
	old__ + val__;							\
})
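
/*
 * "laa"/"laag" place the previous value in old__, so the macro evaluates
 * to old__ + val__, i.e. the value after the addition. Hypothetical
 * example (refcnt and first_user_setup() are not defined in this header):
 *
 *	if (this_cpu_add_return(refcnt, 1) == 1)
 *		first_user_setup();
 */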

#define this_cpu_add_return_4(pcp, val) arch_this_cpu_add_return(pcp, val, "laa")
#define this_cpu_add_return_8(pcp, val) arch_this_cpu_add_return(pcp, val, "laag")

#define arch_this_cpu_to_op(pcp, val, op)				\
{									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	asm volatile(							\
		op "    %[old__],%[val__],%[ptr__]\n"			\
		: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)		\
		: [val__] "d" (val__)					\
		: "cc");						\
	preempt_enable_notrace();					\
}

#define this_cpu_and_4(pcp, val)	arch_this_cpu_to_op(pcp, val, "lan")
#define this_cpu_and_8(pcp, val)	arch_this_cpu_to_op(pcp, val, "lang")
#define this_cpu_or_4(pcp, val)		arch_this_cpu_to_op(pcp, val, "lao")
#define this_cpu_or_8(pcp, val)		arch_this_cpu_to_op(pcp, val, "laog")
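
/*
 * "lan"/"lao" (and their 64 bit forms) apply the mask atomically to the
 * cpu-local word; the previous value loaded into old__ is discarded.
 * Hypothetical example (flags and the bit masks are not defined here):
 *
 *	this_cpu_and(flags, ~PENDING_MASK);	// clear bits
 *	this_cpu_or(flags, ACTIVE_MASK);	// set bits
 */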

#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */

#define arch_this_cpu_cmpxchg(pcp, oval, nval)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ ret__;						\
	pcp_op_T__ *ptr__;						\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = cmpxchg(ptr__, oval, nval);				\
	preempt_enable_notrace();					\
	ret__;								\
})
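
/*
 * Hypothetical use of the cmpxchg accessor (state and the STATE_* values
 * are not defined in this header): the store only happens if the cpu-local
 * value still equals oval, and the previous value is returned either way.
 *
 *	if (this_cpu_cmpxchg(state, STATE_IDLE, STATE_BUSY) == STATE_IDLE)
 *		;	// we performed the transition
 */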

#define this_cpu_cmpxchg_1(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_2(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_4(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_8(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)

#define arch_this_cpu_xchg(pcp, nval)					\
({									\
	typeof(pcp) *ptr__;						\
	typeof(pcp) ret__;						\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = xchg(ptr__, nval);					\
	preempt_enable_notrace();					\
	ret__;								\
})
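
/*
 * Hypothetical use of the xchg accessor (pending_ticks is not defined in
 * this header): the new value is stored unconditionally and the previous
 * cpu-local value is returned, e.g. to read and reset a per-cpu total.
 *
 *	unsigned long old = this_cpu_xchg(pending_ticks, 0);
 */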

#define this_cpu_xchg_1(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_2(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_4(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_8(pcp, nval) arch_this_cpu_xchg(pcp, nval)

#define arch_this_cpu_cmpxchg_double(pcp1, pcp2, o1, o2, n1, n2)	    \
({									    \
	typeof(pcp1) *p1__;						    \
	typeof(pcp2) *p2__;						    \
	int ret__;							    \
									    \
	preempt_disable_notrace();					    \
	p1__ = raw_cpu_ptr(&(pcp1));					    \
	p2__ = raw_cpu_ptr(&(pcp2));					    \
	ret__ = __cmpxchg_double((unsigned long)p1__, (unsigned long)p2__, \
				 (unsigned long)(o1), (unsigned long)(o2),  \
				 (unsigned long)(n1), (unsigned long)(n2)); \
	preempt_enable_notrace();					    \
	ret__;								    \
})

#define this_cpu_cmpxchg_double_8 arch_this_cpu_cmpxchg_double
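
/*
 * The double-word variant compares and swaps two adjacent 8-byte per-cpu
 * members in one operation and returns true on success. As with the
 * generic cmpxchg_double helpers, the two variables are expected to form
 * a contiguous, 16-byte aligned pair. Hypothetical example (head and gen
 * are not defined in this header):
 *
 *	if (this_cpu_cmpxchg_double(head, gen, old_head, old_gen,
 *				    new_head, new_gen))
 *		;	// both fields were updated atomically
 */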

#include <asm-generic/percpu.h>

#endif /* __ARCH_S390_PERCPU__ */