cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

string_32.h (5491B)


/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_STRING_32_H
#define _ASM_X86_STRING_32_H

#ifdef __KERNEL__

/* Let gcc decide whether to inline or use the out of line functions */

#define __HAVE_ARCH_STRCPY
extern char *strcpy(char *dest, const char *src);

#define __HAVE_ARCH_STRNCPY
extern char *strncpy(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCAT
extern char *strcat(char *dest, const char *src);

#define __HAVE_ARCH_STRNCAT
extern char *strncat(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCMP
extern int strcmp(const char *cs, const char *ct);

#define __HAVE_ARCH_STRNCMP
extern int strncmp(const char *cs, const char *ct, size_t count);

#define __HAVE_ARCH_STRCHR
extern char *strchr(const char *s, int c);

#define __HAVE_ARCH_STRLEN
extern size_t strlen(const char *s);

static __always_inline void *__memcpy(void *to, const void *from, size_t n)
{
	int d0, d1, d2;
	asm volatile("rep ; movsl\n\t"
		     "movl %4,%%ecx\n\t"
		     "andl $3,%%ecx\n\t"
		     "jz 1f\n\t"
		     "rep ; movsb\n\t"
		     "1:"
		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
		     : "0" (n / 4), "g" (n), "1" ((long)to), "2" ((long)from)
		     : "memory");
	return to;
}
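
/*
 * Illustrative sketch, not part of the original header: the asm above
 * copies n/4 dwords with "rep ; movsl", reloads the byte count
 * ("movl %4,%%ecx"), masks it down to n%4 ("andl $3,%%ecx") and copies
 * the remaining bytes with "rep ; movsb".  A plain-C equivalent, with a
 * hypothetical name and meant for reading only, would be:
 *
 *	void *memcpy_sketch(void *to, const void *from, size_t n)
 *	{
 *		unsigned char *d = to;
 *		const unsigned char *s = from;
 *		size_t words = n / 4;
 *
 *		while (words--) {			// rep ; movsl
 *			*(unsigned int *)d = *(const unsigned int *)s;
 *			d += 4;
 *			s += 4;
 *		}
 *		for (n &= 3; n; n--)			// rep ; movsb
 *			*d++ = *s++;
 *		return to;
 *	}
 */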

/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static __always_inline void *__constant_memcpy(void *to, const void *from,
					       size_t n)
{
	long esi, edi;
	if (!n)
		return to;

	switch (n) {
	case 1:
		*(char *)to = *(char *)from;
		return to;
	case 2:
		*(short *)to = *(short *)from;
		return to;
	case 4:
		*(int *)to = *(int *)from;
		return to;
	case 3:
		*(short *)to = *(short *)from;
		*((char *)to + 2) = *((char *)from + 2);
		return to;
	case 5:
		*(int *)to = *(int *)from;
		*((char *)to + 4) = *((char *)from + 4);
		return to;
	case 6:
		*(int *)to = *(int *)from;
		*((short *)to + 2) = *((short *)from + 2);
		return to;
	case 8:
		*(int *)to = *(int *)from;
		*((int *)to + 1) = *((int *)from + 1);
		return to;
	}

	esi = (long)from;
	edi = (long)to;
	if (n >= 5 * 4) {
		/* large block: use rep prefix */
		int ecx;
		asm volatile("rep ; movsl"
			     : "=&c" (ecx), "=&D" (edi), "=&S" (esi)
			     : "0" (n / 4), "1" (edi), "2" (esi)
			     : "memory"
		);
	} else {
		/* small block: don't clobber ecx + smaller code */
		if (n >= 4 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 3 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 2 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 1 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
	}
	switch (n % 4) {
		/* tail */
	case 0:
		return to;
	case 1:
		asm volatile("movsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	case 2:
		asm volatile("movsw"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	default:
		asm volatile("movsw\n\tmovsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	}
}
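
/*
 * Illustrative note, not part of the original header: since n is a
 * compile-time constant wherever this helper is used, the switch and the
 * n >= k*4 ladder fold away entirely.  For instance, a hypothetical caller
 *
 *	struct pair { int a, b; } dst, src = { 1, 2 };
 *
 *	__constant_memcpy(&dst, &src, sizeof(src));	// n == 8
 *
 * hits "case 8" above and should reduce to two 32-bit loads and stores,
 * with no loop and no rep prefix; the exact code generated is up to the
 * compiler.
 */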

#define __HAVE_ARCH_MEMCPY
extern void *memcpy(void *, const void *, size_t);

#ifndef CONFIG_FORTIFY_SOURCE

#define memcpy(t, f, n) __builtin_memcpy(t, f, n)

#endif /* !CONFIG_FORTIFY_SOURCE */

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t n);

extern int memcmp(const void *, const void *, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memcmp __builtin_memcmp
#endif

#define __HAVE_ARCH_MEMCHR
extern void *memchr(const void *cs, int c, size_t count);

static inline void *__memset_generic(void *s, char c, size_t count)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosb"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "1" (s), "0" (count)
		     : "memory");
	return s;
}
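
/*
 * Illustrative note, not part of the original header: "rep stosb" stores
 * the byte in AL ("a" (c)) to ECX ("0" (count)) successive bytes starting
 * at EDI ("1" (s)); ECX and EDI are modified by the instruction, which is
 * why they appear as the dummy outputs d0 and d1.  A plain-C equivalent
 * (hypothetical name) would be:
 *
 *	void *memset_sketch(void *s, char c, size_t count)
 *	{
 *		unsigned char *p = s;
 *
 *		while (count--)			// one stosb per byte
 *			*p++ = (unsigned char)c;
 *		return s;
 *	}
 */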

/* we might want to write optimized versions of these later */
#define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))

/* Added by Gertjan van Wingerde to make minix and sysv module work */
#define __HAVE_ARCH_STRNLEN
extern size_t strnlen(const char *s, size_t count);
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR
extern char *strstr(const char *cs, const char *ct);

#define __memset(s, c, count)				\
	(__builtin_constant_p(count)			\
	 ? __constant_count_memset((s), (c), (count))	\
	 : __memset_generic((s), (c), (count)))
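
/*
 * Illustrative note, not part of the original header: __builtin_constant_p()
 * is a GCC builtin that evaluates to 1 when the compiler can prove its
 * argument is a compile-time constant, so for example
 *
 *	__memset(buf, 0, 64);	// constant count -> __constant_count_memset()
 *	__memset(buf, 0, len);	// runtime count  -> __memset_generic()
 *
 * Both branches currently expand to __memset_generic(), given how
 * __constant_count_memset() is defined above.
 */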

#define __HAVE_ARCH_MEMSET
extern void *memset(void *, int, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memset(s, c, count) __builtin_memset(s, c, count)
#endif /* !CONFIG_FORTIFY_SOURCE */

#define __HAVE_ARCH_MEMSET16
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosw"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}

#define __HAVE_ARCH_MEMSET32
static inline void *memset32(uint32_t *s, uint32_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosl"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}
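
/*
 * Illustrative usage note, not part of the original header: memset16() and
 * memset32() take a count of 16-bit and 32-bit elements respectively, not
 * a byte count.  Hypothetical example filling a 16bpp scanline with one
 * RGB565 pixel value:
 *
 *	u16 line[320];
 *
 *	memset16(line, 0xf800, ARRAY_SIZE(line));	// 320 red pixels
 *
 * Plain memset() could not do this, since it only replicates a single byte.
 */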

/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern void *memscan(void *addr, int c, size_t size);
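
/*
 * Illustrative note, not part of the original header: unlike memchr(),
 * which returns NULL when the byte is absent, memscan() returns
 * addr + size in that case ("1 past the area").  Hypothetical example:
 *
 *	char buf[8] = "abcdefg";
 *	char *p = memscan(buf, 'z', sizeof(buf));
 *
 *	if (p == buf + sizeof(buf))
 *		;	// 'z' is not in buf
 */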
    227
    228#endif /* __KERNEL__ */
    229
    230#endif /* _ASM_X86_STRING_32_H */