cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

atomic.h (2858B)


/*
 * Copyright (C) 2014 Stefan Kristiansson <stefan.kristiansson@saunalahti.fi>
 *
 * This file is licensed under the terms of the GNU General Public License
 * version 2.  This program is licensed "as is" without any warranty of any
 * kind, whether express or implied.
 */

#ifndef __ASM_OPENRISC_BITOPS_ATOMIC_H
#define __ASM_OPENRISC_BITOPS_ATOMIC_H

static inline void set_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long tmp;

	/*
	 * l.lwa/l.swa form a load-link/store-conditional pair: l.swa only
	 * stores (and sets the flag) if the reservation taken by l.lwa is
	 * still intact; otherwise l.bnf branches back to 1: and the
	 * read-modify-write is retried.
	 */
	__asm__ __volatile__(
		"1:	l.lwa	%0,0(%1)	\n"
		"	l.or	%0,%0,%2	\n"
		"	l.swa	0(%1),%0	\n"
		"	l.bnf	1b		\n"
		"	 l.nop			\n"
		: "=&r"(tmp)
		: "r"(p), "r"(mask)
		: "cc", "memory");
}

static inline void clear_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long tmp;

	__asm__ __volatile__(
		"1:	l.lwa	%0,0(%1)	\n"
		"	l.and	%0,%0,%2	\n"
		"	l.swa	0(%1),%0	\n"
		"	l.bnf	1b		\n"
		"	 l.nop			\n"
		: "=&r"(tmp)
		: "r"(p), "r"(~mask)
		: "cc", "memory");
}

static inline void change_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long tmp;

	__asm__ __volatile__(
		"1:	l.lwa	%0,0(%1)	\n"
		"	l.xor	%0,%0,%2	\n"
		"	l.swa	0(%1),%0	\n"
		"	l.bnf	1b		\n"
		"	 l.nop			\n"
		: "=&r"(tmp)
		: "r"(p), "r"(mask)
		: "cc", "memory");
}

static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old;
	unsigned long tmp;

	/*
	 * Same retry loop as set_bit(), but the pre-update word is kept in
	 * "old" so the caller can tell whether the bit was already set.
	 */
	__asm__ __volatile__(
		"1:	l.lwa	%0,0(%2)	\n"
		"	l.or	%1,%0,%3	\n"
		"	l.swa	0(%2),%1	\n"
		"	l.bnf	1b		\n"
		"	 l.nop			\n"
		: "=&r"(old), "=&r"(tmp)
		: "r"(p), "r"(mask)
		: "cc", "memory");

	return (old & mask) != 0;
}

static inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old;
	unsigned long tmp;

	__asm__ __volatile__(
		"1:	l.lwa	%0,0(%2)	\n"
		"	l.and	%1,%0,%3	\n"
		"	l.swa	0(%2),%1	\n"
		"	l.bnf	1b		\n"
		"	 l.nop			\n"
		: "=&r"(old), "=&r"(tmp)
		: "r"(p), "r"(~mask)
		: "cc", "memory");

	return (old & mask) != 0;
}

static inline int test_and_change_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old;
	unsigned long tmp;

	__asm__ __volatile__(
		"1:	l.lwa	%0,0(%2)	\n"
		"	l.xor	%1,%0,%3	\n"
		"	l.swa	0(%2),%1	\n"
		"	l.bnf	1b		\n"
		"	 l.nop			\n"
		: "=&r"(old), "=&r"(tmp)
		: "r"(p), "r"(mask)
		: "cc", "memory");

	return (old & mask) != 0;
}

#endif /* __ASM_OPENRISC_BITOPS_ATOMIC_H */
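
For context, here is a minimal usage sketch, not part of this file or repository, of how generic kernel code typically reaches these helpers through <linux/bitops.h>. The struct example_dev, the EXAMPLE_FLAG_* bit indices, and the example_* functions are hypothetical names chosen for illustration; only set_bit(), clear_bit(), and test_and_set_bit() are the real API provided above.

/*
 * Hypothetical usage sketch (not from this repository): generic kernel
 * code includes <linux/bitops.h>, and on OpenRISC each call below ends
 * up in one of the l.lwa/l.swa retry loops defined in this header.
 */
#include <linux/bitops.h>
#include <linux/errno.h>
#include <linux/types.h>

#define EXAMPLE_FLAG_BUSY	0	/* bit index, not a mask */
#define EXAMPLE_FLAG_ERROR	1

struct example_dev {
	unsigned long flags;		/* shared flag word, one bit per flag */
};

static int example_try_start(struct example_dev *dev)
{
	/* Atomically set BUSY; the old value says whether we won the race. */
	if (test_and_set_bit(EXAMPLE_FLAG_BUSY, &dev->flags))
		return -EBUSY;		/* someone else already started */

	clear_bit(EXAMPLE_FLAG_ERROR, &dev->flags);
	return 0;
}

static void example_finish(struct example_dev *dev, bool failed)
{
	if (failed)
		set_bit(EXAMPLE_FLAG_ERROR, &dev->flags);

	/* Release BUSY; concurrent CPUs observe the update atomically. */
	clear_bit(EXAMPLE_FLAG_BUSY, &dev->flags);
}

Because every update goes through the load-link/store-conditional loop, concurrent callers on different CPUs cannot lose each other's updates to the same flags word.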