cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

synch.h (2067B)


/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_POWERPC_SYNCH_H
#define _ASM_POWERPC_SYNCH_H
#ifdef __KERNEL__

#include <asm/cputable.h>
#include <asm/feature-fixups.h>
#include <asm/ppc-opcode.h>

#ifndef __ASSEMBLY__
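/*
 * Bounds of the __lwsync_fixup section. At boot, do_lwsync_fixups()
 * rewrites the barrier instructions recorded there (see the LWSYNC and
 * __PPC_ACQUIRE_BARRIER macros below) to lwsync on CPUs that advertise
 * CPU_FTR_LWSYNC.
 */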
extern unsigned int __start___lwsync_fixup, __stop___lwsync_fixup;
extern void do_lwsync_fixups(unsigned long value, void *fixup_start,
			     void *fixup_end);

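/*
 * eieio (Enforce In-order Execution of I/O): orders accesses to
 * device (caching-inhibited) memory and stores to ordinary memory;
 * cheaper than a full sync.
 */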
static inline void eieio(void)
{
	__asm__ __volatile__ ("eieio" : : : "memory");
}

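/*
 * isync: completes all preceding instructions and discards any
 * prefetched ones; an execution (context-synchronizing) barrier
 * rather than a storage barrier.
 */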
static inline void isync(void)
{
	__asm__ __volatile__ ("isync" : : : "memory");
}

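/*
 * Called after a tlbiel (local TLB invalidate); the ptesync waits for
 * the invalidation to complete before any following storage accesses.
 */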
static inline void ppc_after_tlbiel_barrier(void)
{
	asm volatile("ptesync": : :"memory");
	/*
	 * POWER9, POWER10 need a cp_abort after tlbiel to ensure the copy is
	 * invalidated correctly. If this is not done, the paste can take data
	 * from the physical address that was translated at copy time.
	 *
	 * POWER9 in practice does not need this, because address spaces with
	 * accelerators mapped will use tlbie (which does invalidate the copy)
	 * to invalidate translations. It's not possible to limit POWER10 this
	 * way due to local copy-paste.
	 */
	asm volatile(ASM_FTR_IFSET(PPC_CP_ABORT, "", %0) : : "i" (CPU_FTR_ARCH_31) : "memory");
}
#endif /* __ASSEMBLY__ */

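/*
 * LWSYNC: a lightweight sync on 64-bit, where the instruction is
 * architected; on e500 a full sync is emitted but recorded in the
 * fixup section so it can be patched down to lwsync at boot when the
 * CPU supports it; otherwise a full sync.
 */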
#if defined(__powerpc64__)
#    define LWSYNC	lwsync
#elif defined(CONFIG_E500)
#    define LWSYNC					\
	START_LWSYNC_SECTION(96);			\
	sync;						\
	MAKE_LWSYNC_SECTION_ENTRY(96, __lwsync_fixup);
#else
#    define LWSYNC	sync
#endif

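/*
 * With CONFIG_SMP, acquire barriers use isync (patched to lwsync via
 * the fixup section on CPUs where lwsync suffices), release barriers
 * use LWSYNC, and atomic entry/exit barriers use a full sync. On UP
 * the barriers compile to nothing.
 */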
#ifdef CONFIG_SMP
#define __PPC_ACQUIRE_BARRIER				\
	START_LWSYNC_SECTION(97);			\
	isync;						\
	MAKE_LWSYNC_SECTION_ENTRY(97, __lwsync_fixup);
#define PPC_ACQUIRE_BARRIER	 "\n" stringify_in_c(__PPC_ACQUIRE_BARRIER)
#define PPC_RELEASE_BARRIER	 stringify_in_c(LWSYNC) "\n"
#define PPC_ATOMIC_ENTRY_BARRIER "\n" stringify_in_c(sync) "\n"
#define PPC_ATOMIC_EXIT_BARRIER	 "\n" stringify_in_c(sync) "\n"
#else
#define PPC_ACQUIRE_BARRIER
#define PPC_RELEASE_BARRIER
#define PPC_ATOMIC_ENTRY_BARRIER
#define PPC_ATOMIC_EXIT_BARRIER
#endif

#endif /* __KERNEL__ */
#endif	/* _ASM_POWERPC_SYNCH_H */
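
For context, a minimal sketch (not part of this header) of how the atomic
entry/exit barrier macros are typically spliced into inline assembly around
an lwarx/stwcx. loop, in the style of the arch/powerpc atomic helpers. The
function name and the plain-int operand are illustrative assumptions:

/*
 * Illustrative only: shows where PPC_ATOMIC_ENTRY_BARRIER and
 * PPC_ATOMIC_EXIT_BARRIER land around a load-reserve/store-conditional
 * loop, mirroring the pattern used by atomic_add_return().
 */
static inline int example_atomic_add_return(int a, int *v)
{
	int t;

	__asm__ __volatile__(
		PPC_ATOMIC_ENTRY_BARRIER
	"1:	lwarx	%0,0,%2\n"	/* load and reserve */
	"	add	%0,%1,%0\n"
	"	stwcx.	%0,0,%2\n"	/* store iff reservation still held */
	"	bne-	1b\n"		/* retry if the reservation was lost */
		PPC_ATOMIC_EXIT_BARRIER
		: "=&r" (t)
		: "r" (a), "r" (v)
		: "cc", "memory");

	return t;
}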