/* stackinit_kunit.c (14271B) */
1// SPDX-License-Identifier: GPL-2.0-or-later 2/* 3 * Test cases for compiler-based stack variable zeroing via 4 * -ftrivial-auto-var-init={zero,pattern} or CONFIG_GCC_PLUGIN_STRUCTLEAK*. 5 * For example, see: 6 * https://www.kernel.org/doc/html/latest/dev-tools/kunit/kunit-tool.html#configuring-building-and-running-tests 7 * ./tools/testing/kunit/kunit.py run stackinit [--raw_output] \ 8 * --make_option LLVM=1 \ 9 * --kconfig_add CONFIG_INIT_STACK_ALL_ZERO=y 10 * 11 */ 12#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt 13 14#include <kunit/test.h> 15#include <linux/init.h> 16#include <linux/kernel.h> 17#include <linux/module.h> 18#include <linux/string.h> 19 20/* Exfiltration buffer. */ 21#define MAX_VAR_SIZE 128 22static u8 check_buf[MAX_VAR_SIZE]; 23 24/* Character array to trigger stack protector in all functions. */ 25#define VAR_BUFFER 32 26 27/* Volatile mask to convince compiler to copy memory with 0xff. */ 28static volatile u8 forced_mask = 0xff; 29 30/* Location and size tracking to validate fill and test are colocated. */ 31static void *fill_start, *target_start; 32static size_t fill_size, target_size; 33 34static bool range_contains(char *haystack_start, size_t haystack_size, 35 char *needle_start, size_t needle_size) 36{ 37 if (needle_start >= haystack_start && 38 needle_start + needle_size <= haystack_start + haystack_size) 39 return true; 40 return false; 41} 42 43/* Whether the test is expected to fail. 
#define WANT_SUCCESS	0
#define XFAIL		1

/*
 * Per-type-class helpers: each test variable class (SCALAR, STRING,
 * STRUCT) needs a different return type, return expression, call form,
 * and argument-fetch form for the generated do_nothing_*()/leaf_*()
 * functions below.
 */
#define DO_NOTHING_TYPE_SCALAR(var_type)	var_type
#define DO_NOTHING_TYPE_STRING(var_type)	void
#define DO_NOTHING_TYPE_STRUCT(var_type)	void

#define DO_NOTHING_RETURN_SCALAR(ptr)		*(ptr)
#define DO_NOTHING_RETURN_STRING(ptr)		/**/
#define DO_NOTHING_RETURN_STRUCT(ptr)		/**/

#define DO_NOTHING_CALL_SCALAR(var, name)			\
		(var) = do_nothing_ ## name(&(var))
#define DO_NOTHING_CALL_STRING(var, name)			\
		do_nothing_ ## name(var)
#define DO_NOTHING_CALL_STRUCT(var, name)			\
		do_nothing_ ## name(&(var))

#define FETCH_ARG_SCALAR(var)		&var
#define FETCH_ARG_STRING(var)		var
#define FETCH_ARG_STRUCT(var)		&var

/* Length of the char-array ("STRING") test variables. */
#define FILL_SIZE_STRING		16

#define INIT_CLONE_SCALAR		/**/
#define INIT_CLONE_STRING		[FILL_SIZE_STRING]
#define INIT_CLONE_STRUCT		/**/

#define ZERO_CLONE_SCALAR(zero)		memset(&(zero), 0x00, sizeof(zero))
#define ZERO_CLONE_STRING(zero)		memset(&(zero), 0x00, sizeof(zero))
/*
 * For the struct, intentionally poison padding to see if it gets
 * copied out in direct assignments.
 */
#define ZERO_CLONE_STRUCT(zero)				\
	do {						\
		memset(&(zero), 0xFF, sizeof(zero));	\
		zero.one = 0;				\
		zero.two = 0;				\
		zero.three = 0;				\
		zero.four = 0;				\
	} while (0)

/*
 * INIT_<class>_<init_level> expands to the initializer (or trailing
 * statements) appended to the "var" declaration in leaf_*(). The exact
 * textual form matters: each variant exercises a different compiler
 * initialization path.
 */
#define INIT_SCALAR_none(var_type)	/**/
#define INIT_SCALAR_zero(var_type)	= 0

#define INIT_STRING_none(var_type)	[FILL_SIZE_STRING] /**/
#define INIT_STRING_zero(var_type)	[FILL_SIZE_STRING] = { }

#define INIT_STRUCT_none(var_type)	/**/
#define INIT_STRUCT_zero(var_type)	= { }


/* Designated-initializer and runtime-assignment bodies shared below. */
#define __static_partial	{ .two = 0, }
#define __static_all		{ .one = 0,			\
				  .two = 0,			\
				  .three = 0,			\
				  .four = 0,			\
				}
#define __dynamic_partial	{ .two = arg->two, }
#define __dynamic_all		{ .one = arg->one,		\
				  .two = arg->two,		\
				  .three = arg->three,		\
				  .four = arg->four,		\
				}
#define __runtime_partial	var.two = 0
#define __runtime_all		var.one = 0;			\
				var.two = 0;			\
				var.three = 0;			\
				var.four = 0

#define INIT_STRUCT_static_partial(var_type)		\
	= __static_partial
#define INIT_STRUCT_static_all(var_type)		\
	= __static_all
#define INIT_STRUCT_dynamic_partial(var_type)		\
	= __dynamic_partial
#define INIT_STRUCT_dynamic_all(var_type)		\
	= __dynamic_all
/* Leading ';' ends the declaration; init happens as later statements. */
#define INIT_STRUCT_runtime_partial(var_type)		\
	; __runtime_partial
#define INIT_STRUCT_runtime_all(var_type)		\
	; __runtime_all

/* Whole-struct assignment from a compound literal after declaration. */
#define INIT_STRUCT_assigned_static_partial(var_type)	\
	; var = (var_type)__static_partial
#define INIT_STRUCT_assigned_static_all(var_type)	\
	; var = (var_type)__static_all
#define INIT_STRUCT_assigned_dynamic_partial(var_type)	\
	; var = (var_type)__dynamic_partial
#define INIT_STRUCT_assigned_dynamic_all(var_type)	\
	; var = (var_type)__dynamic_all

/* Whole-struct copy from the caller-supplied argument. */
#define INIT_STRUCT_assigned_copy(var_type)		\
	; var = *(arg)
/*
 * @name: unique string name for the test
 * @var_type: type to be tested for zeroing initialization
 * @which: is this a SCALAR, STRING, or STRUCT type?
 * @init_level: what kind of initialization is performed
 * @xfail: is this test expected to fail?
 *
 * DEFINE_TEST_DRIVER() generates the KUnit test body: it calls
 * leaf_<name>() twice on what should be the same stack location —
 * first with fill=1 to smear 0xFF across the variable's frame slot,
 * then with fill=0 so the freshly declared variable is exfiltrated
 * into check_buf. Any 0xFF bytes remaining in check_buf are stack
 * bytes the initialization did not clear.
 */
#define DEFINE_TEST_DRIVER(name, var_type, which, xfail)	\
/* Returns 0 on success, 1 on failure. */			\
static noinline void test_ ## name (struct kunit *test)	\
{								\
	var_type zero INIT_CLONE_ ## which;			\
	int ignored;						\
	u8 sum = 0, i;						\
								\
	/* Notice when a new test is larger than expected. */	\
	BUILD_BUG_ON(sizeof(zero) > MAX_VAR_SIZE);		\
								\
	/* Fill clone type with zero for per-field init. */	\
	ZERO_CLONE_ ## which(zero);				\
	/* Clear entire check buffer for 0xFF overlap test. */	\
	memset(check_buf, 0x00, sizeof(check_buf));		\
	/* Fill stack with 0xFF. */				\
	ignored = leaf_ ##name((unsigned long)&ignored, 1,	\
				FETCH_ARG_ ## which(zero));	\
	/* Verify all bytes overwritten with 0xFF. */		\
	for (sum = 0, i = 0; i < target_size; i++)		\
		sum += (check_buf[i] != 0xFF);			\
	KUNIT_ASSERT_EQ_MSG(test, sum, 0,			\
			    "leaf fill was not 0xFF!?\n");	\
	/* Clear entire check buffer for later bit tests. */	\
	memset(check_buf, 0x00, sizeof(check_buf));		\
	/* Extract stack-defined variable contents. */		\
	ignored = leaf_ ##name((unsigned long)&ignored, 0,	\
				FETCH_ARG_ ## which(zero));	\
								\
	/* Validate that compiler lined up fill and target. */	\
	KUNIT_ASSERT_TRUE_MSG(test,				\
		range_contains(fill_start, fill_size,		\
			    target_start, target_size),		\
		"stack fill missed target!? "			\
		"(fill %zu wide, target offset by %d)\n",	\
		fill_size,					\
		(int)((ssize_t)(uintptr_t)fill_start -		\
		(ssize_t)(uintptr_t)target_start));		\
								\
	/* Look for any bytes still 0xFF in check region. */	\
	for (sum = 0, i = 0; i < target_size; i++)		\
		sum += (check_buf[i] == 0xFF);			\
								\
	if (sum != 0 && xfail)					\
		kunit_skip(test,				\
			   "XFAIL uninit bytes: %d\n",		\
			   sum);				\
	KUNIT_ASSERT_EQ_MSG(test, sum, 0,			\
			    "uninit bytes: %d\n", sum);		\
}
/*
 * DEFINE_TEST() generates the per-type helpers used by the driver:
 * do_nothing_<name>() (a call the optimizer cannot see through, so
 * "var" counts as used) and leaf_<name>() (declares "var" with the
 * requested init_level, optionally fills it with 0xFF, then copies
 * its bytes out to check_buf). It then emits the driver itself.
 */
#define DEFINE_TEST(name, var_type, which, init_level, xfail)	\
/* no-op to force compiler into ignoring "uninitialized" vars */\
static noinline DO_NOTHING_TYPE_ ## which(var_type)		\
do_nothing_ ## name(var_type *ptr)				\
{								\
	/* Will always be true, but compiler doesn't know. */	\
	if ((unsigned long)ptr > 0x2)				\
		return DO_NOTHING_RETURN_ ## which(ptr);	\
	else							\
		return DO_NOTHING_RETURN_ ## which(ptr + 1);	\
}								\
static noinline int leaf_ ## name(unsigned long sp, bool fill,	\
				  var_type *arg)		\
{								\
	char buf[VAR_BUFFER];					\
	var_type var						\
		INIT_ ## which ## _ ## init_level(var_type);	\
								\
	target_start = &var;					\
	target_size = sizeof(var);				\
	/*							\
	 * Keep this buffer around to make sure we've got a	\
	 * stack frame of SOME kind...				\
	 */							\
	memset(buf, (char)(sp & 0xff), sizeof(buf));		\
	/* Fill variable with 0xFF. */				\
	if (fill) {						\
		fill_start = &var;				\
		fill_size = sizeof(var);			\
		memset(fill_start,				\
		       (char)((sp & 0xff) | forced_mask),	\
		       fill_size);				\
	}							\
								\
	/* Silence "never initialized" warnings. */		\
	DO_NOTHING_CALL_ ## which(var, name);			\
								\
	/* Exfiltrate "var". */					\
	memcpy(check_buf, target_start, target_size);		\
								\
	return (int)buf[0] | (int)buf[sizeof(buf) - 1];		\
}								\
DEFINE_TEST_DRIVER(name, var_type, which, xfail)

/* Structure with no padding. */
struct test_packed {
	unsigned long one;
	unsigned long two;
	unsigned long three;
	unsigned long four;
};

/* Simple structure with padding likely to be covered by compiler. */
struct test_small_hole {
	size_t one;
	char two;
	/* 3 byte padding hole here. */
	int three;
	unsigned long four;
};

/* Trigger unhandled padding in a structure. */
struct test_big_hole {
	u8 one;
	u8 two;
	u8 three;
	/* 61 byte padding hole here. */
	u8 four __aligned(64);
} __aligned(64);

/* Padding only at the end of the structure. */
struct test_trailing_hole {
	char *one;
	char *two;
	char *three;
	char four;
	/* "sizeof(unsigned long) - 1" byte padding hole here. */
};

/* Test if STRUCTLEAK is clearing structs with __user fields. */
struct test_user {
	u8 one;
	unsigned long two;
	char __user *three;
	unsigned long four;
};

#define ALWAYS_PASS	WANT_SUCCESS
#define ALWAYS_FAIL	XFAIL

/*
 * Expected outcomes depend on which stack-initialization feature is
 * configured: each STRUCTLEAK level covers a superset of the previous
 * one, and no feature at all (INIT_STACK_NONE) covers nothing.
 */
#ifdef CONFIG_INIT_STACK_NONE
# define USER_PASS	XFAIL
# define BYREF_PASS	XFAIL
# define STRONG_PASS	XFAIL
#elif defined(CONFIG_GCC_PLUGIN_STRUCTLEAK_USER)
# define USER_PASS	WANT_SUCCESS
# define BYREF_PASS	XFAIL
# define STRONG_PASS	XFAIL
#elif defined(CONFIG_GCC_PLUGIN_STRUCTLEAK_BYREF)
# define USER_PASS	WANT_SUCCESS
# define BYREF_PASS	WANT_SUCCESS
# define STRONG_PASS	XFAIL
#else
# define USER_PASS	WANT_SUCCESS
# define BYREF_PASS	WANT_SUCCESS
# define STRONG_PASS	WANT_SUCCESS
#endif

#define DEFINE_SCALAR_TEST(name, init, xfail)		\
	DEFINE_TEST(name ## _ ## init, name, SCALAR,	\
		    init, xfail)

/* One test per scalar width, plus the char-array (STRING) case. */
#define DEFINE_SCALAR_TESTS(init, xfail)		\
	DEFINE_SCALAR_TEST(u8, init, xfail);		\
	DEFINE_SCALAR_TEST(u16, init, xfail);		\
	DEFINE_SCALAR_TEST(u32, init, xfail);		\
	DEFINE_SCALAR_TEST(u64, init, xfail);		\
	DEFINE_TEST(char_array_ ## init, unsigned char,	\
		    STRING, init, xfail)

#define DEFINE_STRUCT_TEST(name, init, xfail)		\
	DEFINE_TEST(name ## _ ## init,			\
		    struct test_ ## name, STRUCT, init,	\
		    xfail)

/* One test per padding-layout struct defined above. */
#define DEFINE_STRUCT_TESTS(init, xfail)		\
	DEFINE_STRUCT_TEST(small_hole, init, xfail);	\
	DEFINE_STRUCT_TEST(big_hole, init, xfail);	\
	DEFINE_STRUCT_TEST(trailing_hole, init, xfail);	\
	DEFINE_STRUCT_TEST(packed, init, xfail)

#define DEFINE_STRUCT_INITIALIZER_TESTS(base, xfail)	\
	DEFINE_STRUCT_TESTS(base ## _ ## partial,	\
			    xfail);			\
	DEFINE_STRUCT_TESTS(base ## _ ## all, xfail)

/* These should be fully initialized all the time! */
DEFINE_SCALAR_TESTS(zero, ALWAYS_PASS);
DEFINE_STRUCT_TESTS(zero, ALWAYS_PASS);
/* Struct initializers: padding may be left uninitialized. */
DEFINE_STRUCT_INITIALIZER_TESTS(static, STRONG_PASS);
DEFINE_STRUCT_INITIALIZER_TESTS(dynamic, STRONG_PASS);
DEFINE_STRUCT_INITIALIZER_TESTS(runtime, STRONG_PASS);
DEFINE_STRUCT_INITIALIZER_TESTS(assigned_static, STRONG_PASS);
DEFINE_STRUCT_INITIALIZER_TESTS(assigned_dynamic, STRONG_PASS);
DEFINE_STRUCT_TESTS(assigned_copy, ALWAYS_FAIL);
/* No initialization without compiler instrumentation. */
DEFINE_SCALAR_TESTS(none, STRONG_PASS);
DEFINE_STRUCT_TESTS(none, BYREF_PASS);
/* Initialization of members with __user attribute. */
DEFINE_TEST(user, struct test_user, STRUCT, none, USER_PASS);

/*
 * Check two uses through a variable declaration outside either path,
 * which was noticed as a special case in porting earlier stack init
 * compiler logic.
 */
static int noinline __leaf_switch_none(int path, bool fill)
{
	switch (path) {
		/*
		 * This is intentionally unreachable. To silence the
		 * warning, build with -Wno-switch-unreachable
		 */
		uint64_t var[10];

	case 1:
		target_start = &var;
		target_size = sizeof(var);
		if (fill) {
			fill_start = &var;
			fill_size = sizeof(var);

			memset(fill_start, forced_mask | 0x55, fill_size);
		}
		memcpy(check_buf, target_start, target_size);
		break;
	case 2:
		target_start = &var;
		target_size = sizeof(var);
		if (fill) {
			fill_start = &var;
			fill_size = sizeof(var);

			memset(fill_start, forced_mask | 0xaa, fill_size);
		}
		memcpy(check_buf, target_start, target_size);
		break;
	default:
		var[1] = 5;
		return var[1] & forced_mask;
	}
	return 0;
}

/* leaf_*() wrappers matching the signature DEFINE_TEST_DRIVER() expects. */
static noinline int leaf_switch_1_none(unsigned long sp, bool fill,
					      uint64_t *arg)
{
	return __leaf_switch_none(1, fill);
}

static noinline int leaf_switch_2_none(unsigned long sp, bool fill,
					      uint64_t *arg)
{
	return __leaf_switch_none(2, fill);
}

/*
 * These are expected to fail for most configurations because neither
 * GCC nor Clang have a way to perform initialization of variables in
 * non-code areas (i.e. in a switch statement before the first "case").
 * https://bugs.llvm.org/show_bug.cgi?id=44916
 */
DEFINE_TEST_DRIVER(switch_1_none, uint64_t, SCALAR, ALWAYS_FAIL);
DEFINE_TEST_DRIVER(switch_2_none, uint64_t, SCALAR, ALWAYS_FAIL);

/* KUNIT_CASE() entries for one scalar-test family. */
#define KUNIT_test_scalars(init)			\
		KUNIT_CASE(test_u8_ ## init),		\
		KUNIT_CASE(test_u16_ ## init),		\
		KUNIT_CASE(test_u32_ ## init),		\
		KUNIT_CASE(test_u64_ ## init),		\
		KUNIT_CASE(test_char_array_ ## init)

/* KUNIT_CASE() entries for one struct-test family. */
#define KUNIT_test_structs(init)			\
		KUNIT_CASE(test_small_hole_ ## init),	\
		KUNIT_CASE(test_big_hole_ ## init),	\
		KUNIT_CASE(test_trailing_hole_ ## init),\
		KUNIT_CASE(test_packed_ ## init)	\

static struct kunit_case stackinit_test_cases[] = {
	/* These are explicitly initialized and should always pass. */
	KUNIT_test_scalars(zero),
	KUNIT_test_structs(zero),
	/* Padding here appears to be accidentally always initialized? */
	KUNIT_test_structs(dynamic_partial),
	KUNIT_test_structs(assigned_dynamic_partial),
	/* Padding initialization depends on compiler behaviors. */
	KUNIT_test_structs(static_partial),
	KUNIT_test_structs(static_all),
	KUNIT_test_structs(dynamic_all),
	KUNIT_test_structs(runtime_partial),
	KUNIT_test_structs(runtime_all),
	KUNIT_test_structs(assigned_static_partial),
	KUNIT_test_structs(assigned_static_all),
	KUNIT_test_structs(assigned_dynamic_all),
	/* Everything fails this since it effectively performs a memcpy(). */
	KUNIT_test_structs(assigned_copy),
	/* STRUCTLEAK_BYREF_ALL should cover everything from here down. */
	KUNIT_test_scalars(none),
	KUNIT_CASE(test_switch_1_none),
	KUNIT_CASE(test_switch_2_none),
	/* STRUCTLEAK_BYREF should cover from here down. */
	KUNIT_test_structs(none),
	/* STRUCTLEAK will only cover this. */
	KUNIT_CASE(test_user),
	{}
};

static struct kunit_suite stackinit_test_suite = {
	.name = "stackinit",
	.test_cases = stackinit_test_cases,
};

kunit_test_suites(&stackinit_test_suite);

MODULE_LICENSE("GPL");