cachepc-linux

Fork of AMDESE/linux with modifications for CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

selftest.c (9937B)


// SPDX-License-Identifier: GPL-2.0
/*
 * KCSAN short boot-time selftests.
 *
 * Copyright (C) 2019, Google LLC.
 */

#define pr_fmt(fmt) "kcsan: " fmt

#include <linux/atomic.h>
#include <linux/bitops.h>
#include <linux/init.h>
#include <linux/kcsan-checks.h>
#include <linux/kernel.h>
#include <linux/printk.h>
#include <linux/random.h>
#include <linux/sched.h>
#include <linux/spinlock.h>
#include <linux/types.h>

#include "encoding.h"
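/*
 * encoding.h provides the helpers exercised below: check_encodable(),
 * encode_watchpoint(), decode_watchpoint(), and the WATCHPOINT_* masks.
 */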

#define ITERS_PER_TEST 2000

/* Test requirements. */
static bool __init test_requires(void)
{
	/* The PRNG should be initialized for the tests below. */
	return prandom_u32() + prandom_u32() != 0;
}

/*
 * Test watchpoint encode and decode: check that encoding an access's info
 * and then decoding it preserves that info.
 */
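/*
 * A sketch of the layout (encoding.h is authoritative): the encoded
 * watchpoint packs is_write, the access size, and the low address bits
 * into a single long, roughly:
 *
 *	encoded = is_write << (BITS_PER_LONG - 1)
 *		| size << WATCHPOINT_ADDR_BITS
 *		| (addr & WATCHPOINT_ADDR_MASK);
 *
 * The upper address bits are dropped, which is why the decode check
 * below compares against (addr & WATCHPOINT_ADDR_MASK), not addr.
 */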
static bool __init test_encode_decode(void)
{
	int i;

	for (i = 0; i < ITERS_PER_TEST; ++i) {
		size_t size = prandom_u32_max(MAX_ENCODABLE_SIZE) + 1;
		bool is_write = !!prandom_u32_max(2);
		unsigned long verif_masked_addr;
		long encoded_watchpoint;
		bool verif_is_write;
		unsigned long addr;
		size_t verif_size;

		prandom_bytes(&addr, sizeof(addr));
		if (addr < PAGE_SIZE)
			addr = PAGE_SIZE;

		if (WARN_ON(!check_encodable(addr, size)))
			return false;

		encoded_watchpoint = encode_watchpoint(addr, size, is_write);

		/* Check special watchpoints */
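		/*
		 * INVALID_WATCHPOINT and CONSUMED_WATCHPOINT are sentinels the
		 * runtime uses for free and already-consumed watchpoint slots;
		 * they encode no access, so decoding them must always fail.
		 */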
		if (WARN_ON(decode_watchpoint(INVALID_WATCHPOINT, &verif_masked_addr, &verif_size, &verif_is_write)))
			return false;
		if (WARN_ON(decode_watchpoint(CONSUMED_WATCHPOINT, &verif_masked_addr, &verif_size, &verif_is_write)))
			return false;

		/* Check decoding watchpoint returns same data */
		if (WARN_ON(!decode_watchpoint(encoded_watchpoint, &verif_masked_addr, &verif_size, &verif_is_write)))
			return false;
		if (WARN_ON(verif_masked_addr != (addr & WATCHPOINT_ADDR_MASK)))
			goto fail;
		if (WARN_ON(verif_size != size))
			goto fail;
		if (WARN_ON(is_write != verif_is_write))
			goto fail;

		continue;
fail:
		pr_err("%s fail: %s %zu bytes @ %lx -> encoded: %lx -> %s %zu bytes @ %lx\n",
		       __func__, is_write ? "write" : "read", size, addr, encoded_watchpoint,
		       verif_is_write ? "write" : "read", verif_size, verif_masked_addr);
		return false;
	}

	return true;
}

/* Test access matching function. */
static bool __init test_matching_access(void)
{
	if (WARN_ON(!matching_access(10, 1, 10, 1)))
		return false;
	if (WARN_ON(!matching_access(10, 2, 11, 1)))
		return false;
	if (WARN_ON(!matching_access(10, 1, 9, 2)))
		return false;
	if (WARN_ON(matching_access(10, 1, 11, 1)))
		return false;
	if (WARN_ON(matching_access(9, 1, 10, 1)))
		return false;

	/*
	 * An access of size 0 could match another access, as demonstrated here.
	 * Rather than add more comparisons to 'matching_access()', which would
	 * end up in the fast-path for *all* checks, check_access() simply
	 * returns for all accesses of size 0.
	 */
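	/* Example: the zero-size access at 12 falls inside the 8-byte access at 8, i.e. [8, 16). */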
	if (WARN_ON(!matching_access(8, 8, 12, 0)))
		return false;

	return true;
}

/*
 * Correct memory barrier instrumentation is critical to avoiding false
 * positives: this is a simple boot-time check that certain barriers are
 * always properly instrumented. See kcsan_test for a more complete test.
 */
static DEFINE_SPINLOCK(test_spinlock);
static bool __init test_barrier(void)
{
#ifdef CONFIG_KCSAN_WEAK_MEMORY
	struct kcsan_scoped_access *reorder_access = &current->kcsan_ctx.reorder_access;
#else
	struct kcsan_scoped_access *reorder_access = NULL;
#endif
	bool ret = true;
	arch_spinlock_t arch_spinlock = __ARCH_SPIN_LOCK_UNLOCKED;
	atomic_t dummy;
	long test_var;

	if (!reorder_access || !IS_ENABLED(CONFIG_SMP))
		return true;

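/*
 * How the check works (a sketch; see kernel/kcsan/core.c for details): each
 * macro below plants a scoped reorder_access of size 1 in the current task.
 * A properly instrumented barrier calls into the KCSAN runtime, which
 * releases the planted access and resets its size to 0. If the size is
 * still nonzero after the barrier, that barrier is not instrumented.
 */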
#define __KCSAN_CHECK_BARRIER(access_type, barrier, name)					\
	do {											\
		reorder_access->type = (access_type) | KCSAN_ACCESS_SCOPED;			\
		reorder_access->size = 1;							\
		barrier;									\
		if (reorder_access->size != 0) {						\
			pr_err("improperly instrumented type=(" #access_type "): " name "\n");	\
			ret = false;								\
		}										\
	} while (0)
#define KCSAN_CHECK_READ_BARRIER(b)  __KCSAN_CHECK_BARRIER(0, b, #b)
#define KCSAN_CHECK_WRITE_BARRIER(b) __KCSAN_CHECK_BARRIER(KCSAN_ACCESS_WRITE, b, #b)
#define KCSAN_CHECK_RW_BARRIER(b)    __KCSAN_CHECK_BARRIER(KCSAN_ACCESS_WRITE | KCSAN_ACCESS_COMPOUND, b, #b)
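/*
 * The three flavors mirror the access types KCSAN models: plain reads (0),
 * writes (KCSAN_ACCESS_WRITE), and compound read-write operations.
 */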

	kcsan_nestable_atomic_begin(); /* No watchpoints in called functions. */

	KCSAN_CHECK_READ_BARRIER(mb());
	KCSAN_CHECK_READ_BARRIER(rmb());
	KCSAN_CHECK_READ_BARRIER(smp_mb());
	KCSAN_CHECK_READ_BARRIER(smp_rmb());
	KCSAN_CHECK_READ_BARRIER(dma_rmb());
	KCSAN_CHECK_READ_BARRIER(smp_mb__before_atomic());
	KCSAN_CHECK_READ_BARRIER(smp_mb__after_atomic());
	KCSAN_CHECK_READ_BARRIER(smp_mb__after_spinlock());
	KCSAN_CHECK_READ_BARRIER(smp_store_mb(test_var, 0));
	KCSAN_CHECK_READ_BARRIER(smp_store_release(&test_var, 0));
	KCSAN_CHECK_READ_BARRIER(xchg(&test_var, 0));
	KCSAN_CHECK_READ_BARRIER(xchg_release(&test_var, 0));
	KCSAN_CHECK_READ_BARRIER(cmpxchg(&test_var, 0,  0));
	KCSAN_CHECK_READ_BARRIER(cmpxchg_release(&test_var, 0,  0));
	KCSAN_CHECK_READ_BARRIER(atomic_set_release(&dummy, 0));
	KCSAN_CHECK_READ_BARRIER(atomic_add_return(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(atomic_add_return_release(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(atomic_fetch_add(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(atomic_fetch_add_release(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(test_and_set_bit(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(test_and_clear_bit(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(test_and_change_bit(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(clear_bit_unlock(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(__clear_bit_unlock(0, &test_var));
	arch_spin_lock(&arch_spinlock);
	KCSAN_CHECK_READ_BARRIER(arch_spin_unlock(&arch_spinlock));
	spin_lock(&test_spinlock);
	KCSAN_CHECK_READ_BARRIER(spin_unlock(&test_spinlock));

	KCSAN_CHECK_WRITE_BARRIER(mb());
	KCSAN_CHECK_WRITE_BARRIER(wmb());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb());
	KCSAN_CHECK_WRITE_BARRIER(smp_wmb());
	KCSAN_CHECK_WRITE_BARRIER(dma_wmb());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb__before_atomic());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb__after_atomic());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb__after_spinlock());
	KCSAN_CHECK_WRITE_BARRIER(smp_store_mb(test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(smp_store_release(&test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(xchg(&test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(xchg_release(&test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(cmpxchg(&test_var, 0,  0));
	KCSAN_CHECK_WRITE_BARRIER(cmpxchg_release(&test_var, 0,  0));
	KCSAN_CHECK_WRITE_BARRIER(atomic_set_release(&dummy, 0));
	KCSAN_CHECK_WRITE_BARRIER(atomic_add_return(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(atomic_add_return_release(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(atomic_fetch_add(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(atomic_fetch_add_release(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(test_and_set_bit(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(test_and_clear_bit(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(test_and_change_bit(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(clear_bit_unlock(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(__clear_bit_unlock(0, &test_var));
	arch_spin_lock(&arch_spinlock);
	KCSAN_CHECK_WRITE_BARRIER(arch_spin_unlock(&arch_spinlock));
	spin_lock(&test_spinlock);
	KCSAN_CHECK_WRITE_BARRIER(spin_unlock(&test_spinlock));

	KCSAN_CHECK_RW_BARRIER(mb());
	KCSAN_CHECK_RW_BARRIER(wmb());
	KCSAN_CHECK_RW_BARRIER(rmb());
	KCSAN_CHECK_RW_BARRIER(smp_mb());
	KCSAN_CHECK_RW_BARRIER(smp_wmb());
	KCSAN_CHECK_RW_BARRIER(smp_rmb());
	KCSAN_CHECK_RW_BARRIER(dma_wmb());
	KCSAN_CHECK_RW_BARRIER(dma_rmb());
	KCSAN_CHECK_RW_BARRIER(smp_mb__before_atomic());
	KCSAN_CHECK_RW_BARRIER(smp_mb__after_atomic());
	KCSAN_CHECK_RW_BARRIER(smp_mb__after_spinlock());
	KCSAN_CHECK_RW_BARRIER(smp_store_mb(test_var, 0));
	KCSAN_CHECK_RW_BARRIER(smp_store_release(&test_var, 0));
	KCSAN_CHECK_RW_BARRIER(xchg(&test_var, 0));
	KCSAN_CHECK_RW_BARRIER(xchg_release(&test_var, 0));
	KCSAN_CHECK_RW_BARRIER(cmpxchg(&test_var, 0,  0));
	KCSAN_CHECK_RW_BARRIER(cmpxchg_release(&test_var, 0,  0));
	KCSAN_CHECK_RW_BARRIER(atomic_set_release(&dummy, 0));
	KCSAN_CHECK_RW_BARRIER(atomic_add_return(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(atomic_add_return_release(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(atomic_fetch_add(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(atomic_fetch_add_release(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(test_and_set_bit(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(test_and_clear_bit(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(test_and_change_bit(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(clear_bit_unlock(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(__clear_bit_unlock(0, &test_var));
	arch_spin_lock(&arch_spinlock);
	KCSAN_CHECK_RW_BARRIER(arch_spin_unlock(&arch_spinlock));
	spin_lock(&test_spinlock);
	KCSAN_CHECK_RW_BARRIER(spin_unlock(&test_spinlock));

#ifdef clear_bit_unlock_is_negative_byte
	KCSAN_CHECK_RW_BARRIER(clear_bit_unlock_is_negative_byte(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(clear_bit_unlock_is_negative_byte(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(clear_bit_unlock_is_negative_byte(0, &test_var));
#endif
	kcsan_nestable_atomic_end();

	return ret;
}

static int __init kcsan_selftest(void)
{
	int passed = 0;
	int total = 0;

#define RUN_TEST(do_test)                                                      \
	do {                                                                   \
		++total;                                                       \
		if (do_test())                                                 \
			++passed;                                              \
		else                                                           \
			pr_err("selftest: " #do_test " failed\n");             \
	} while (0)

	RUN_TEST(test_requires);
	RUN_TEST(test_encode_decode);
	RUN_TEST(test_matching_access);
	RUN_TEST(test_barrier);

	pr_info("selftest: %d/%d tests passed\n", passed, total);
	if (passed != total)
		panic("selftests failed");
	return 0;
}
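/* Run early during boot: postcore_initcall runs right after core initcalls. */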
postcore_initcall(kcsan_selftest);