cachepc-linux

Fork of AMDESE/linux with modifications for CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

atomic_64.S (4174B)


/* SPDX-License-Identifier: GPL-2.0 */
/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */

#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/backoff.h>
#include <asm/export.h>
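
/* The BACKOFF_* macros (<asm/backoff.h>) implement exponential backoff
 * between failed compare-and-swap attempts: BACKOFF_SETUP initializes a
 * spin count and BACKOFF_SPIN busy-waits, roughly doubling that count
 * before branching back to the retry label.  On non-SMP builds
 * BACKOFF_LABEL(2f, 1b) selects the plain retry label 1b instead, so
 * the backoff path costs nothing there.
 */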

	.text

	/* Three versions of the atomic routines: one that
	 * does not return a value and does not perform
	 * memory barriers, and two which return
	 * a value (the new and old value, respectively)
	 * and do the barriers.
	 */

#define ATOMIC_OP(op)							\
ENTRY(arch_atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */		\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic_##op);						\
EXPORT_SYMBOL(arch_atomic_##op);
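
/* For illustration, ATOMIC_OP(add) expands to roughly the following
 * (the "!" comments are added here and are not part of the expansion):
 *
 *	arch_atomic_add:		! %o0 = increment, %o1 = atomic_ptr
 *	1:	lduw	[%o1], %g1	! load the current 32-bit value
 *		add	%g1, %o0, %g7	! compute the new value
 *		cas	[%o1], %g1, %g7	! store iff *%o1 still equals %g1
 *		cmp	%g1, %g7	! cas leaves the value it saw in %g7
 *		bne,pn	%icc, 2f	! lost the race: back off and retry
 *		 nop
 *		retl
 *		 nop
 *	2:	<backoff, then branch back to 1b>
 *
 * cas always writes the value it found in memory back into %g7, so
 * %g1 == %g7 after the cas exactly when our update took effect.
 */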

#define ATOMIC_OP_RETURN(op)						\
ENTRY(arch_atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 op	%g1, %o0, %g1;						\
	retl;								\
	 sra	%g1, 0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic_##op##_return);					\
EXPORT_SYMBOL(arch_atomic_##op##_return);
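
/* Two details in the _return variant: "op %g1, %o0, %g1" sits in the
 * branch delay slot, so it executes on both paths, but its result only
 * matters on fall-through (on a retry, %g1 is reloaded at 1:).  And
 * "sra %g1, 0, %o0" is a shift-right-arithmetic by zero bits, used here
 * purely to sign-extend the 32-bit result into a 64-bit return value.
 */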

#define ATOMIC_FETCH_OP(op)						\
ENTRY(arch_atomic_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	lduw	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	cas	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 sra	%g1, 0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic_fetch_##op);					\
EXPORT_SYMBOL(arch_atomic_fetch_##op);
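
/* The fetch_ variant differs from _return only in what it hands back:
 * the value loaded before the operation (%g1 as read by lduw), again
 * sign-extended via sra, rather than the updated value.
 */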

ATOMIC_OP(add)
ATOMIC_OP_RETURN(add)
ATOMIC_FETCH_OP(add)

ATOMIC_OP(sub)
ATOMIC_OP_RETURN(sub)
ATOMIC_FETCH_OP(sub)

ATOMIC_OP(and)
ATOMIC_FETCH_OP(and)

ATOMIC_OP(or)
ATOMIC_FETCH_OP(or)

ATOMIC_OP(xor)
ATOMIC_FETCH_OP(xor)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
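
/* A caller's-eye sketch (illustrative C, not part of this file): kernel
 * code normally reaches these routines through the generic atomic_t
 * wrappers rather than calling the arch_ symbols directly, e.g.:
 *
 *	atomic_t refs = ATOMIC_INIT(1);
 *
 *	atomic_add(1, &refs);                      // ATOMIC_OP(add)
 *	int new = atomic_add_return(1, &refs);     // new value, barriers
 *	int old = atomic_fetch_add(1, &refs);      // old value, barriers
 */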

#define ATOMIC64_OP(op)							\
ENTRY(arch_atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 nop;								\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic64_##op);						\
EXPORT_SYMBOL(arch_atomic64_##op);
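
/* The 64-bit variants repeat the same retry pattern with three
 * mechanical substitutions: lduw becomes ldx (64-bit load), cas becomes
 * casx (64-bit compare-and-swap), and the branch tests %xcc, the 64-bit
 * condition codes, instead of %icc.
 */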

#define ATOMIC64_OP_RETURN(op)						\
ENTRY(arch_atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 op	%g1, %o0, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic64_##op##_return);					\
EXPORT_SYMBOL(arch_atomic64_##op##_return);
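
/* No sra is needed here: the "op %g1, %o0, %o0" in the retl delay slot
 * computes the full 64-bit result directly into the return register.
 */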

#define ATOMIC64_FETCH_OP(op)						\
ENTRY(arch_atomic64_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */	\
	BACKOFF_SETUP(%o2);						\
1:	ldx	[%o1], %g1;						\
	op	%g1, %o0, %g7;						\
	casx	[%o1], %g1, %g7;					\
	cmp	%g1, %g7;						\
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b);				\
	 nop;								\
	retl;								\
	 mov	%g1, %o0;						\
2:	BACKOFF_SPIN(%o2, %o3, 1b);					\
ENDPROC(arch_atomic64_fetch_##op);					\
EXPORT_SYMBOL(arch_atomic64_fetch_##op);
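
/* Likewise, the 64-bit fetch_ variant simply moves the previously
 * loaded value into %o0; as a full 64-bit quantity it needs no sign
 * extension.
 */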

ATOMIC64_OP(add)
ATOMIC64_OP_RETURN(add)
ATOMIC64_FETCH_OP(add)

ATOMIC64_OP(sub)
ATOMIC64_OP_RETURN(sub)
ATOMIC64_FETCH_OP(sub)

ATOMIC64_OP(and)
ATOMIC64_FETCH_OP(and)

ATOMIC64_OP(or)
ATOMIC64_FETCH_OP(or)

ATOMIC64_OP(xor)
ATOMIC64_FETCH_OP(xor)

#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

ENTRY(arch_atomic64_dec_if_positive) /* %o0 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o0], %g1
	brlez,pn %g1, 3f
	 sub	%g1, 1, %g7
	casx	[%o0], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
3:	retl
	 sub	%g1, 1, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(arch_atomic64_dec_if_positive)
EXPORT_SYMBOL(arch_atomic64_dec_if_positive)
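
/* A C-style equivalent of the routine above (illustrative only):
 *
 *	s64 arch_atomic64_dec_if_positive(atomic64_t *v)
 *	{
 *		s64 old = READ_ONCE(v->counter);
 *		for (;;) {
 *			if (old <= 0)
 *				return old - 1;		// no store done
 *			if (try_cmpxchg(&v->counter, &old, old - 1))
 *				return old - 1;		// the new value
 *		}
 *	}
 *
 * Callers test the return value: a result < 0 means the decrement was
 * not applied.  The brlez delay slot computes %g1 - 1 before the branch
 * outcome is known, which is harmless on either path.
 */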