cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

fpu.h (4623B)


/* SPDX-License-Identifier: GPL-2.0 */

#ifndef __KVM_FPU_H_
#define __KVM_FPU_H_

#include <asm/fpu/api.h>

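/*
 * sse128_t holds one 128-bit XMM value. The macros below reinterpret it as
 * two 64-bit halves (sse128_lo/sse128_hi) or four 32-bit lanes
 * (sse128_l0..sse128_l3), and sse128() rebuilds a value from two quadwords.
 */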
typedef u32		__attribute__((vector_size(16))) sse128_t;
#define __sse128_u	union { sse128_t vec; u64 as_u64[2]; u32 as_u32[4]; }
#define sse128_lo(x)	({ __sse128_u t; t.vec = x; t.as_u64[0]; })
#define sse128_hi(x)	({ __sse128_u t; t.vec = x; t.as_u64[1]; })
#define sse128_l0(x)	({ __sse128_u t; t.vec = x; t.as_u32[0]; })
#define sse128_l1(x)	({ __sse128_u t; t.vec = x; t.as_u32[1]; })
#define sse128_l2(x)	({ __sse128_u t; t.vec = x; t.as_u32[2]; })
#define sse128_l3(x)	({ __sse128_u t; t.vec = x; t.as_u32[3]; })
#define sse128(lo, hi)	({ __sse128_u t; t.as_u64[0] = lo; t.as_u64[1] = hi; t.vec; })

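/*
 * Raw accessors: copy XMM register 'reg' to or from *data. The caller must
 * already hold the FPU (see kvm_fpu_get() below); xmm8-xmm15 exist only on
 * x86-64, hence the CONFIG_X86_64 guard.
 */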
static inline void _kvm_read_sse_reg(int reg, sse128_t *data)
{
	switch (reg) {
	case 0: asm("movdqa %%xmm0, %0" : "=m"(*data)); break;
	case 1: asm("movdqa %%xmm1, %0" : "=m"(*data)); break;
	case 2: asm("movdqa %%xmm2, %0" : "=m"(*data)); break;
	case 3: asm("movdqa %%xmm3, %0" : "=m"(*data)); break;
	case 4: asm("movdqa %%xmm4, %0" : "=m"(*data)); break;
	case 5: asm("movdqa %%xmm5, %0" : "=m"(*data)); break;
	case 6: asm("movdqa %%xmm6, %0" : "=m"(*data)); break;
	case 7: asm("movdqa %%xmm7, %0" : "=m"(*data)); break;
#ifdef CONFIG_X86_64
	case 8: asm("movdqa %%xmm8, %0" : "=m"(*data)); break;
	case 9: asm("movdqa %%xmm9, %0" : "=m"(*data)); break;
	case 10: asm("movdqa %%xmm10, %0" : "=m"(*data)); break;
	case 11: asm("movdqa %%xmm11, %0" : "=m"(*data)); break;
	case 12: asm("movdqa %%xmm12, %0" : "=m"(*data)); break;
	case 13: asm("movdqa %%xmm13, %0" : "=m"(*data)); break;
	case 14: asm("movdqa %%xmm14, %0" : "=m"(*data)); break;
	case 15: asm("movdqa %%xmm15, %0" : "=m"(*data)); break;
#endif
	default: BUG();
	}
}

static inline void _kvm_write_sse_reg(int reg, const sse128_t *data)
{
	switch (reg) {
	case 0: asm("movdqa %0, %%xmm0" : : "m"(*data)); break;
	case 1: asm("movdqa %0, %%xmm1" : : "m"(*data)); break;
	case 2: asm("movdqa %0, %%xmm2" : : "m"(*data)); break;
	case 3: asm("movdqa %0, %%xmm3" : : "m"(*data)); break;
	case 4: asm("movdqa %0, %%xmm4" : : "m"(*data)); break;
	case 5: asm("movdqa %0, %%xmm5" : : "m"(*data)); break;
	case 6: asm("movdqa %0, %%xmm6" : : "m"(*data)); break;
	case 7: asm("movdqa %0, %%xmm7" : : "m"(*data)); break;
#ifdef CONFIG_X86_64
	case 8: asm("movdqa %0, %%xmm8" : : "m"(*data)); break;
	case 9: asm("movdqa %0, %%xmm9" : : "m"(*data)); break;
	case 10: asm("movdqa %0, %%xmm10" : : "m"(*data)); break;
	case 11: asm("movdqa %0, %%xmm11" : : "m"(*data)); break;
	case 12: asm("movdqa %0, %%xmm12" : : "m"(*data)); break;
	case 13: asm("movdqa %0, %%xmm13" : : "m"(*data)); break;
	case 14: asm("movdqa %0, %%xmm14" : : "m"(*data)); break;
	case 15: asm("movdqa %0, %%xmm15" : : "m"(*data)); break;
#endif
	default: BUG();
	}
}

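/*
 * Same pattern for the eight 64-bit MMX registers; again the caller must
 * hold the FPU while these run.
 */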
static inline void _kvm_read_mmx_reg(int reg, u64 *data)
{
	switch (reg) {
	case 0: asm("movq %%mm0, %0" : "=m"(*data)); break;
	case 1: asm("movq %%mm1, %0" : "=m"(*data)); break;
	case 2: asm("movq %%mm2, %0" : "=m"(*data)); break;
	case 3: asm("movq %%mm3, %0" : "=m"(*data)); break;
	case 4: asm("movq %%mm4, %0" : "=m"(*data)); break;
	case 5: asm("movq %%mm5, %0" : "=m"(*data)); break;
	case 6: asm("movq %%mm6, %0" : "=m"(*data)); break;
	case 7: asm("movq %%mm7, %0" : "=m"(*data)); break;
	default: BUG();
	}
}

static inline void _kvm_write_mmx_reg(int reg, const u64 *data)
{
	switch (reg) {
	case 0: asm("movq %0, %%mm0" : : "m"(*data)); break;
	case 1: asm("movq %0, %%mm1" : : "m"(*data)); break;
	case 2: asm("movq %0, %%mm2" : : "m"(*data)); break;
	case 3: asm("movq %0, %%mm3" : : "m"(*data)); break;
	case 4: asm("movq %0, %%mm4" : : "m"(*data)); break;
	case 5: asm("movq %0, %%mm5" : : "m"(*data)); break;
	case 6: asm("movq %0, %%mm6" : : "m"(*data)); break;
	case 7: asm("movq %0, %%mm7" : : "m"(*data)); break;
	default: BUG();
	}
}

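/*
 * kvm_fpu_get() pins the current FPU context: fpregs_lock() keeps the
 * register state from being switched out underneath us, and if the task's
 * FPU state is not currently loaded in the registers (TIF_NEED_FPU_LOAD),
 * switch_fpu_return() restores it first. kvm_fpu_put() releases the lock.
 */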
static inline void kvm_fpu_get(void)
{
	fpregs_lock();

	fpregs_assert_state_consistent();
	if (test_thread_flag(TIF_NEED_FPU_LOAD))
		switch_fpu_return();
}

static inline void kvm_fpu_put(void)
{
	fpregs_unlock();
}

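/*
 * Safe wrappers: each one brackets a single register access with
 * kvm_fpu_get()/kvm_fpu_put(), so callers need no extra locking.
 */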
static inline void kvm_read_sse_reg(int reg, sse128_t *data)
{
	kvm_fpu_get();
	_kvm_read_sse_reg(reg, data);
	kvm_fpu_put();
}

static inline void kvm_write_sse_reg(int reg, const sse128_t *data)
{
	kvm_fpu_get();
	_kvm_write_sse_reg(reg, data);
	kvm_fpu_put();
}

static inline void kvm_read_mmx_reg(int reg, u64 *data)
{
	kvm_fpu_get();
	_kvm_read_mmx_reg(reg, data);
	kvm_fpu_put();
}

static inline void kvm_write_mmx_reg(int reg, const u64 *data)
{
	kvm_fpu_get();
	_kvm_write_mmx_reg(reg, data);
	kvm_fpu_put();
}

#endif
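
A minimal sketch of how a caller might combine the safe wrappers with the
sse128 macros above; the function name is hypothetical and not part of this
header:

	/* Illustrative only: swap the two 64-bit halves of an XMM register. */
	static void example_swap_xmm_halves(int reg)
	{
		sse128_t v;
		u64 lo, hi;

		kvm_read_sse_reg(reg, &v);	/* each wrapper does its own kvm_fpu_get/put */
		lo = sse128_lo(v);
		hi = sse128_hi(v);
		v = sse128(hi, lo);		/* rebuild with the halves swapped */
		kvm_write_sse_reg(reg, &v);
	}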