cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

percpu.h (5283B)


/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef __ASM_PERCPU_H
#define __ASM_PERCPU_H

#include <asm/cmpxchg.h>
#include <asm/loongarch.h>

/* Use r21 for fast access */
register unsigned long __my_cpu_offset __asm__("$r21");

static inline void set_my_cpu_offset(unsigned long off)
{
	__my_cpu_offset = off;
	csr_write64(off, PERCPU_BASE_KS);
}
#define __my_cpu_offset __my_cpu_offset

/*
 * PERCPU_OP() generates __percpu_add/and/or. The LoongArch AM* atomics
 * return the old memory value in %[ret], so "ret c_op val" yields the
 * updated value.
 */
#define PERCPU_OP(op, asm_op, c_op)					\
static inline unsigned long __percpu_##op(void *ptr,			\
			unsigned long val, int size)			\
{									\
	unsigned long ret;						\
									\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__(					\
		"am"#asm_op".w"	" %[ret], %[val], %[ptr]	\n"		\
		: [ret] "=&r" (ret), [ptr] "+ZB"(*(u32 *)ptr)		\
		: [val] "r" (val));					\
		break;							\
	case 8:								\
		__asm__ __volatile__(					\
		"am"#asm_op".d" " %[ret], %[val], %[ptr]	\n"		\
		: [ret] "=&r" (ret), [ptr] "+ZB"(*(u64 *)ptr)		\
		: [val] "r" (val));					\
		break;							\
	default:							\
		ret = 0;						\
		BUILD_BUG();						\
	}								\
									\
	return ret c_op val;						\
}

PERCPU_OP(add, add, +)
PERCPU_OP(and, and, &)
PERCPU_OP(or, or, |)
#undef PERCPU_OP

/*
 * Per-CPU loads/stores: $r21 holds __my_cpu_offset, so an indexed
 * ldx/stx with $r21 addresses this CPU's copy of the variable.
 */
static inline unsigned long __percpu_read(void *ptr, int size)
{
	unsigned long ret;

	switch (size) {
	case 1:
		__asm__ __volatile__ ("ldx.b %[ret], $r21, %[ptr]	\n"
		: [ret] "=&r"(ret)
		: [ptr] "r"(ptr)
		: "memory");
		break;
	case 2:
		__asm__ __volatile__ ("ldx.h %[ret], $r21, %[ptr]	\n"
		: [ret] "=&r"(ret)
		: [ptr] "r"(ptr)
		: "memory");
		break;
	case 4:
		__asm__ __volatile__ ("ldx.w %[ret], $r21, %[ptr]	\n"
		: [ret] "=&r"(ret)
		: [ptr] "r"(ptr)
		: "memory");
		break;
	case 8:
		__asm__ __volatile__ ("ldx.d %[ret], $r21, %[ptr]	\n"
		: [ret] "=&r"(ret)
		: [ptr] "r"(ptr)
		: "memory");
		break;
	default:
		ret = 0;
		BUILD_BUG();
	}

	return ret;
}

static inline void __percpu_write(void *ptr, unsigned long val, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__("stx.b %[val], $r21, %[ptr]	\n"
		:
		: [val] "r" (val), [ptr] "r" (ptr)
		: "memory");
		break;
	case 2:
		__asm__ __volatile__("stx.h %[val], $r21, %[ptr]	\n"
		:
		: [val] "r" (val), [ptr] "r" (ptr)
		: "memory");
		break;
	case 4:
		__asm__ __volatile__("stx.w %[val], $r21, %[ptr]	\n"
		:
		: [val] "r" (val), [ptr] "r" (ptr)
		: "memory");
		break;
	case 8:
		__asm__ __volatile__("stx.d %[val], $r21, %[ptr]	\n"
		:
		: [val] "r" (val), [ptr] "r" (ptr)
		: "memory");
		break;
	default:
		BUILD_BUG();
	}
}

static inline unsigned long __percpu_xchg(void *ptr, unsigned long val,
						int size)
{
	switch (size) {
	case 4:
		return __xchg_asm("amswap.w", (volatile u32 *)ptr, (u32)val);

	case 8:
		return __xchg_asm("amswap.d", (volatile u64 *)ptr, (u64)val);

	default:
		BUILD_BUG();
	}

	return 0;
}

/* this_cpu_cmpxchg */
#define _protect_cmpxchg_local(pcp, o, n)			\
({								\
	typeof(*raw_cpu_ptr(&(pcp))) __ret;			\
	preempt_disable_notrace();				\
	__ret = cmpxchg_local(raw_cpu_ptr(&(pcp)), o, n);	\
	preempt_enable_notrace();				\
	__ret;							\
})

#define _percpu_read(pcp)						\
({									\
	typeof(pcp) __retval;						\
	__retval = (typeof(pcp))__percpu_read(&(pcp), sizeof(pcp));	\
	__retval;							\
})

#define _percpu_write(pcp, val)						\
do {									\
	__percpu_write(&(pcp), (unsigned long)(val), sizeof(pcp));	\
} while (0)								\

/*
 * Resolve the current CPU's copy with raw_cpu_ptr() and run the
 * operation with preemption disabled.
 */
#define _pcp_protect(operation, pcp, val)			\
({								\
	typeof(pcp) __retval;					\
	preempt_disable_notrace();				\
	__retval = (typeof(pcp))operation(raw_cpu_ptr(&(pcp)),	\
					  (val), sizeof(pcp));	\
	preempt_enable_notrace();				\
	__retval;						\
})

#define _percpu_add(pcp, val) \
	_pcp_protect(__percpu_add, pcp, val)

#define _percpu_add_return(pcp, val) _percpu_add(pcp, val)

#define _percpu_and(pcp, val) \
	_pcp_protect(__percpu_and, pcp, val)

#define _percpu_or(pcp, val) \
	_pcp_protect(__percpu_or, pcp, val)

#define _percpu_xchg(pcp, val) ((typeof(pcp)) \
	_pcp_protect(__percpu_xchg, pcp, (unsigned long)(val)))

#define this_cpu_add_4(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_8(pcp, val) _percpu_add(pcp, val)

#define this_cpu_add_return_4(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_8(pcp, val) _percpu_add_return(pcp, val)

#define this_cpu_and_4(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_8(pcp, val) _percpu_and(pcp, val)

#define this_cpu_or_4(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_8(pcp, val) _percpu_or(pcp, val)

#define this_cpu_read_1(pcp) _percpu_read(pcp)
#define this_cpu_read_2(pcp) _percpu_read(pcp)
#define this_cpu_read_4(pcp) _percpu_read(pcp)
#define this_cpu_read_8(pcp) _percpu_read(pcp)

#define this_cpu_write_1(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_2(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_4(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_8(pcp, val) _percpu_write(pcp, val)

#define this_cpu_xchg_4(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_8(pcp, val) _percpu_xchg(pcp, val)

#define this_cpu_cmpxchg_4(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_8(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)

#include <asm-generic/percpu.h>

#endif /* __ASM_PERCPU_H */
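
For context, a minimal usage sketch (not part of this header): on 64-bit LoongArch an unsigned long per-CPU variable is 8 bytes wide, so the generic this_cpu_*() accessors from <linux/percpu.h> resolve to the *_8 overrides defined above. The variable and function names below are hypothetical.

#include <linux/percpu.h>

static DEFINE_PER_CPU(unsigned long, demo_counter);	/* hypothetical per-CPU variable */

static void demo_percpu_usage(void)
{
	unsigned long v;

	this_cpu_write(demo_counter, 0);	/* this_cpu_write_8 -> _percpu_write -> stx.d */
	this_cpu_add(demo_counter, 3);		/* this_cpu_add_8 -> _percpu_add -> amadd.d  */
	v = this_cpu_read(demo_counter);	/* this_cpu_read_8 -> _percpu_read -> ldx.d   */
	(void)v;
}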