From 2ee8bd2f14f1fe909108e89a24ec9f6de814f438 Mon Sep 17 00:00:00 2001
From: Louis Burda
Date: Sat, 13 Aug 2022 20:59:43 +0200
Subject: Fixed counts read, added test for hwpf and disable attempts

---
 kmod/cachepc.c | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/kmod/cachepc.c b/kmod/cachepc.c
index 9c896f3..4e07f32 100755
--- a/kmod/cachepc.c
+++ b/kmod/cachepc.c
@@ -15,7 +15,6 @@ static cacheline *build_cache_ds(cache_ctx *ctx, cacheline **cacheline_ptr_arr);
 static void build_randomized_list_for_cache_set(cache_ctx *ctx, cacheline **cacheline_ptr_arr);
 static cacheline **allocate_cache_ds(cache_ctx *ctx);
 static uint16_t get_virt_cache_set(cache_ctx *ctx, void *ptr);
-static void *aligned_alloc(size_t alignment, size_t size);
 
 void
 cachepc_init_pmc(uint8_t index, uint8_t event_no, uint8_t event_mask)
@@ -385,7 +384,7 @@ allocate_cache_ds(cache_ctx *ctx)
 	BUG_ON(ctx->addressing != VIRTUAL);
 
 	// For virtual addressing, allocating a consecutive chunk of memory is enough
-	cl_arr = aligned_alloc(PAGE_SIZE, ctx->cache_size);
+	cl_arr = cachepc_aligned_alloc(PAGE_SIZE, ctx->cache_size);
 	BUG_ON(cl_arr == NULL);
 
 	for (i = 0; i < ctx->nr_of_cachelines; ++i) {
@@ -404,7 +403,7 @@ get_virt_cache_set(cache_ctx *ctx, void *ptr)
 }
 
 void *
-aligned_alloc(size_t alignment, size_t size)
+cachepc_aligned_alloc(size_t alignment, size_t size)
 {
 	void *p;
 
--
cgit v1.2.3-71-gd317
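
Note: the hunks above rename the module-local aligned_alloc() helper to cachepc_aligned_alloc() and drop the now-stale static forward declaration, presumably so the name no longer shadows the userspace aligned_alloc(3) and so the helper can carry the module's prefix. The function body itself lies outside the hunk context; what follows is only a minimal sketch of how a kernel-space aligned allocator of this shape might look, assuming a kzalloc-based implementation that rounds the request up to a multiple of the alignment. The rounding strategy and the BUG_ON checks are assumptions for illustration, not the code from kmod/cachepc.c.

#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/types.h>
#include <linux/bug.h>

/* Hypothetical sketch, not the body from kmod/cachepc.c: round the request
 * up to a multiple of the alignment, let the slab allocator hand back a
 * naturally aligned block, then verify that the alignment actually holds. */
void *
cachepc_aligned_alloc(size_t alignment, size_t size)
{
	void *p;

	/* only power-of-two alignments make sense for kmalloc-backed memory */
	BUG_ON(alignment == 0 || (alignment & (alignment - 1)) != 0);

	/* round size up so it is a multiple of the requested alignment */
	if (size % alignment != 0)
		size = size - (size % alignment) + alignment;

	p = kzalloc(size, GFP_KERNEL);
	BUG_ON(p == NULL || ((uintptr_t) p) % alignment != 0);

	return p;
}

A scheme like this leans on the fact that recent kernels give power-of-two kmalloc/kzalloc sizes their natural alignment; for the PAGE_SIZE-aligned, cache-sized buffer requested in allocate_cache_ds(), the BUG_ON on the returned pointer makes that assumption explicit rather than silent.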