cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

test_bpf.c (364830B)


      1// SPDX-License-Identifier: GPL-2.0-only
      2/*
      3 * Testsuite for BPF interpreter and BPF JIT compiler
      4 *
      5 * Copyright (c) 2011-2014 PLUMgrid, http://plumgrid.com
      6 */
      7
      8#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
      9
     10#include <linux/init.h>
     11#include <linux/module.h>
     12#include <linux/filter.h>
     13#include <linux/bpf.h>
     14#include <linux/skbuff.h>
     15#include <linux/netdevice.h>
     16#include <linux/if_vlan.h>
     17#include <linux/random.h>
     18#include <linux/highmem.h>
     19#include <linux/sched.h>
     20
     21/* General test specific settings */
     22#define MAX_SUBTESTS	3
     23#define MAX_TESTRUNS	1000
     24#define MAX_DATA	128
     25#define MAX_INSNS	512
     26#define MAX_K		0xffffFFFF
     27
      28/* A few constants used to init the test 'skb' */
     29#define SKB_TYPE	3
     30#define SKB_MARK	0x1234aaaa
     31#define SKB_HASH	0x1234aaab
     32#define SKB_QUEUE_MAP	123
     33#define SKB_VLAN_TCI	0xffff
     34#define SKB_VLAN_PRESENT	1
     35#define SKB_DEV_IFINDEX	577
     36#define SKB_DEV_TYPE	588
     37
     38/* Redefine REGs to make tests less verbose */
     39#define R0		BPF_REG_0
     40#define R1		BPF_REG_1
     41#define R2		BPF_REG_2
     42#define R3		BPF_REG_3
     43#define R4		BPF_REG_4
     44#define R5		BPF_REG_5
     45#define R6		BPF_REG_6
     46#define R7		BPF_REG_7
     47#define R8		BPF_REG_8
     48#define R9		BPF_REG_9
     49#define R10		BPF_REG_10
     50
     51/* Flags that can be passed to test cases */
     52#define FLAG_NO_DATA		BIT(0)
     53#define FLAG_EXPECTED_FAIL	BIT(1)
     54#define FLAG_SKB_FRAG		BIT(2)
     55#define FLAG_VERIFIER_ZEXT	BIT(3)
     56#define FLAG_LARGE_MEM		BIT(4)
     57
     58enum {
     59	CLASSIC  = BIT(6),	/* Old BPF instructions only. */
     60	INTERNAL = BIT(7),	/* Extended instruction set.  */
     61};
     62
     63#define TEST_TYPE_MASK		(CLASSIC | INTERNAL)
     64
     65struct bpf_test {
     66	const char *descr;
     67	union {
     68		struct sock_filter insns[MAX_INSNS];
     69		struct bpf_insn insns_int[MAX_INSNS];
     70		struct {
     71			void *insns;
     72			unsigned int len;
     73		} ptr;
     74	} u;
     75	__u8 aux;
     76	__u8 data[MAX_DATA];
     77	struct {
     78		int data_size;
     79		__u32 result;
     80	} test[MAX_SUBTESTS];
     81	int (*fill_helper)(struct bpf_test *self);
     82	int expected_errcode; /* used when FLAG_EXPECTED_FAIL is set in the aux */
     83	__u8 frag_data[MAX_DATA];
     84	int stack_depth; /* for eBPF only, since tests don't call verifier */
     85	int nr_testruns; /* Custom run count, defaults to MAX_TESTRUNS if 0 */
     86};
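/*
 * A minimal sketch of how a table entry built on the struct above could be
 * written (hypothetical values, shown only to illustrate the field layout;
 * the real test tables follow further down in this file):
 *
 *	{
 *		"RET_K example",
 *		.u.insns = { BPF_STMT(BPF_RET | BPF_K, 1) },
 *		CLASSIC | FLAG_NO_DATA,
 *		{ },
 *		{ { 0, 1 } },	(no input data, expected result 1)
 *	},
 */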
     87
     88/* Large test cases need separate allocation and fill handler. */
     89
     90static int bpf_fill_maxinsns1(struct bpf_test *self)
     91{
     92	unsigned int len = BPF_MAXINSNS;
     93	struct sock_filter *insn;
     94	__u32 k = ~0;
     95	int i;
     96
     97	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
     98	if (!insn)
     99		return -ENOMEM;
    100
    101	for (i = 0; i < len; i++, k--)
    102		insn[i] = __BPF_STMT(BPF_RET | BPF_K, k);
    103
    104	self->u.ptr.insns = insn;
    105	self->u.ptr.len = len;
    106
    107	return 0;
    108}
    109
    110static int bpf_fill_maxinsns2(struct bpf_test *self)
    111{
    112	unsigned int len = BPF_MAXINSNS;
    113	struct sock_filter *insn;
    114	int i;
    115
    116	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
    117	if (!insn)
    118		return -ENOMEM;
    119
    120	for (i = 0; i < len; i++)
    121		insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
    122
    123	self->u.ptr.insns = insn;
    124	self->u.ptr.len = len;
    125
    126	return 0;
    127}
    128
    129static int bpf_fill_maxinsns3(struct bpf_test *self)
    130{
    131	unsigned int len = BPF_MAXINSNS;
    132	struct sock_filter *insn;
    133	struct rnd_state rnd;
    134	int i;
    135
    136	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
    137	if (!insn)
    138		return -ENOMEM;
    139
    140	prandom_seed_state(&rnd, 3141592653589793238ULL);
    141
    142	for (i = 0; i < len - 1; i++) {
    143		__u32 k = prandom_u32_state(&rnd);
    144
    145		insn[i] = __BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, k);
    146	}
    147
    148	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
    149
    150	self->u.ptr.insns = insn;
    151	self->u.ptr.len = len;
    152
    153	return 0;
    154}
    155
    156static int bpf_fill_maxinsns4(struct bpf_test *self)
    157{
    158	unsigned int len = BPF_MAXINSNS + 1;
    159	struct sock_filter *insn;
    160	int i;
    161
    162	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
    163	if (!insn)
    164		return -ENOMEM;
    165
    166	for (i = 0; i < len; i++)
    167		insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
    168
    169	self->u.ptr.insns = insn;
    170	self->u.ptr.len = len;
    171
    172	return 0;
    173}
    174
    175static int bpf_fill_maxinsns5(struct bpf_test *self)
    176{
    177	unsigned int len = BPF_MAXINSNS;
    178	struct sock_filter *insn;
    179	int i;
    180
    181	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
    182	if (!insn)
    183		return -ENOMEM;
    184
    185	insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);
    186
    187	for (i = 1; i < len - 1; i++)
    188		insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
    189
    190	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);
    191
    192	self->u.ptr.insns = insn;
    193	self->u.ptr.len = len;
    194
    195	return 0;
    196}
    197
    198static int bpf_fill_maxinsns6(struct bpf_test *self)
    199{
    200	unsigned int len = BPF_MAXINSNS;
    201	struct sock_filter *insn;
    202	int i;
    203
    204	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
    205	if (!insn)
    206		return -ENOMEM;
    207
    208	for (i = 0; i < len - 1; i++)
    209		insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
    210				     SKF_AD_VLAN_TAG_PRESENT);
    211
    212	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
    213
    214	self->u.ptr.insns = insn;
    215	self->u.ptr.len = len;
    216
    217	return 0;
    218}
    219
    220static int bpf_fill_maxinsns7(struct bpf_test *self)
    221{
    222	unsigned int len = BPF_MAXINSNS;
    223	struct sock_filter *insn;
    224	int i;
    225
    226	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
    227	if (!insn)
    228		return -ENOMEM;
    229
    230	for (i = 0; i < len - 4; i++)
    231		insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
    232				     SKF_AD_CPU);
    233
    234	insn[len - 4] = __BPF_STMT(BPF_MISC | BPF_TAX, 0);
    235	insn[len - 3] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
    236				   SKF_AD_CPU);
    237	insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0);
    238	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
    239
    240	self->u.ptr.insns = insn;
    241	self->u.ptr.len = len;
    242
    243	return 0;
    244}
    245
    246static int bpf_fill_maxinsns8(struct bpf_test *self)
    247{
    248	unsigned int len = BPF_MAXINSNS;
    249	struct sock_filter *insn;
    250	int i, jmp_off = len - 3;
    251
    252	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
    253	if (!insn)
    254		return -ENOMEM;
    255
    256	insn[0] = __BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff);
    257
    258	for (i = 1; i < len - 1; i++)
    259		insn[i] = __BPF_JUMP(BPF_JMP | BPF_JGT, 0xffffffff, jmp_off--, 0);
    260
    261	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
    262
    263	self->u.ptr.insns = insn;
    264	self->u.ptr.len = len;
    265
    266	return 0;
    267}
    268
    269static int bpf_fill_maxinsns9(struct bpf_test *self)
    270{
    271	unsigned int len = BPF_MAXINSNS;
    272	struct bpf_insn *insn;
    273	int i;
    274
    275	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
    276	if (!insn)
    277		return -ENOMEM;
    278
    279	insn[0] = BPF_JMP_IMM(BPF_JA, 0, 0, len - 2);
    280	insn[1] = BPF_ALU32_IMM(BPF_MOV, R0, 0xcbababab);
    281	insn[2] = BPF_EXIT_INSN();
    282
    283	for (i = 3; i < len - 2; i++)
    284		insn[i] = BPF_ALU32_IMM(BPF_MOV, R0, 0xfefefefe);
    285
    286	insn[len - 2] = BPF_EXIT_INSN();
    287	insn[len - 1] = BPF_JMP_IMM(BPF_JA, 0, 0, -(len - 1));
    288
    289	self->u.ptr.insns = insn;
    290	self->u.ptr.len = len;
    291
    292	return 0;
    293}
    294
    295static int bpf_fill_maxinsns10(struct bpf_test *self)
    296{
    297	unsigned int len = BPF_MAXINSNS, hlen = len - 2;
    298	struct bpf_insn *insn;
    299	int i;
    300
    301	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
    302	if (!insn)
    303		return -ENOMEM;
    304
    305	for (i = 0; i < hlen / 2; i++)
    306		insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 2 - 2 * i);
    307	for (i = hlen - 1; i > hlen / 2; i--)
    308		insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 1 - 2 * i);
    309
    310	insn[hlen / 2] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen / 2 - 1);
    311	insn[hlen]     = BPF_ALU32_IMM(BPF_MOV, R0, 0xabababac);
    312	insn[hlen + 1] = BPF_EXIT_INSN();
    313
    314	self->u.ptr.insns = insn;
    315	self->u.ptr.len = len;
    316
    317	return 0;
    318}
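/*
 * A scaled-down illustration of the jump ladder built above (not taken from
 * the source; it assumes a hypothetical hlen of 6): insn[0..2] get forward
 * offsets 4, 2, 0, insn[5] and insn[4] get backward offsets -5 and -3, and
 * insn[3] gets offset 2, so execution ping-pongs 0 -> 5 -> 1 -> 4 -> 2 -> 3
 * before reaching the MOV/EXIT pair at insn[hlen].
 */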
    319
    320static int __bpf_fill_ja(struct bpf_test *self, unsigned int len,
    321			 unsigned int plen)
    322{
    323	struct sock_filter *insn;
    324	unsigned int rlen;
    325	int i, j;
    326
    327	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
    328	if (!insn)
    329		return -ENOMEM;
    330
    331	rlen = (len % plen) - 1;
    332
    333	for (i = 0; i + plen < len; i += plen)
    334		for (j = 0; j < plen; j++)
    335			insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA,
    336						 plen - 1 - j, 0, 0);
    337	for (j = 0; j < rlen; j++)
    338		insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA, rlen - 1 - j,
    339					 0, 0);
    340
    341	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xababcbac);
    342
    343	self->u.ptr.insns = insn;
    344	self->u.ptr.len = len;
    345
    346	return 0;
    347}
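/*
 * Worked example for the helper above: __bpf_fill_ja(self, 12, 9), as used
 * by bpf_fill_ja() further down, gives rlen = (12 % 9) - 1 = 2. The full
 * block fills insn[0..8] with jump offsets 8, 7, ..., 0, the remainder fills
 * insn[9..10] with offsets 1, 0, and insn[11] is the final BPF_RET.
 */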
    348
    349static int bpf_fill_maxinsns11(struct bpf_test *self)
    350{
     351	/* Hits 70 passes on x86_64 and triggers NOP padding. */
    352	return __bpf_fill_ja(self, BPF_MAXINSNS, 68);
    353}
    354
    355static int bpf_fill_maxinsns12(struct bpf_test *self)
    356{
    357	unsigned int len = BPF_MAXINSNS;
    358	struct sock_filter *insn;
    359	int i = 0;
    360
    361	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
    362	if (!insn)
    363		return -ENOMEM;
    364
    365	insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);
    366
    367	for (i = 1; i < len - 1; i++)
    368		insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);
    369
    370	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);
    371
    372	self->u.ptr.insns = insn;
    373	self->u.ptr.len = len;
    374
    375	return 0;
    376}
    377
    378static int bpf_fill_maxinsns13(struct bpf_test *self)
    379{
    380	unsigned int len = BPF_MAXINSNS;
    381	struct sock_filter *insn;
    382	int i = 0;
    383
    384	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
    385	if (!insn)
    386		return -ENOMEM;
    387
    388	for (i = 0; i < len - 3; i++)
    389		insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);
    390
    391	insn[len - 3] = __BPF_STMT(BPF_LD | BPF_IMM, 0xabababab);
    392	insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0);
    393	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
    394
    395	self->u.ptr.insns = insn;
    396	self->u.ptr.len = len;
    397
    398	return 0;
    399}
    400
    401static int bpf_fill_ja(struct bpf_test *self)
    402{
    403	/* Hits exactly 11 passes on x86_64 JIT. */
    404	return __bpf_fill_ja(self, 12, 9);
    405}
    406
    407static int bpf_fill_ld_abs_get_processor_id(struct bpf_test *self)
    408{
    409	unsigned int len = BPF_MAXINSNS;
    410	struct sock_filter *insn;
    411	int i;
    412
    413	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
    414	if (!insn)
    415		return -ENOMEM;
    416
    417	for (i = 0; i < len - 1; i += 2) {
    418		insn[i] = __BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 0);
    419		insn[i + 1] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
    420					 SKF_AD_OFF + SKF_AD_CPU);
    421	}
    422
    423	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xbee);
    424
    425	self->u.ptr.insns = insn;
    426	self->u.ptr.len = len;
    427
    428	return 0;
    429}
    430
    431static int __bpf_fill_stxdw(struct bpf_test *self, int size)
    432{
    433	unsigned int len = BPF_MAXINSNS;
    434	struct bpf_insn *insn;
    435	int i;
    436
    437	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
    438	if (!insn)
    439		return -ENOMEM;
    440
    441	insn[0] = BPF_ALU32_IMM(BPF_MOV, R0, 1);
    442	insn[1] = BPF_ST_MEM(size, R10, -40, 42);
    443
    444	for (i = 2; i < len - 2; i++)
    445		insn[i] = BPF_STX_XADD(size, R10, R0, -40);
    446
    447	insn[len - 2] = BPF_LDX_MEM(size, R0, R10, -40);
    448	insn[len - 1] = BPF_EXIT_INSN();
    449
    450	self->u.ptr.insns = insn;
    451	self->u.ptr.len = len;
    452	self->stack_depth = 40;
    453
    454	return 0;
    455}
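/*
 * The filler above stores 42 at fp-40 and then executes (len - 4) atomic
 * adds of R0 == 1, so assuming BPF_MAXINSNS is 4096 the value loaded back
 * should be 42 + 4092 = 4134 for both the BPF_W and BPF_DW variants below.
 */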
    456
    457static int bpf_fill_stxw(struct bpf_test *self)
    458{
    459	return __bpf_fill_stxdw(self, BPF_W);
    460}
    461
    462static int bpf_fill_stxdw(struct bpf_test *self)
    463{
    464	return __bpf_fill_stxdw(self, BPF_DW);
    465}
    466
    467static int __bpf_ld_imm64(struct bpf_insn insns[2], u8 reg, s64 imm64)
    468{
    469	struct bpf_insn tmp[] = {BPF_LD_IMM64(reg, imm64)};
    470
    471	memcpy(insns, tmp, sizeof(tmp));
    472	return 2;
    473}
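/*
 * BPF_LD_IMM64 is a two-slot instruction (the second slot carries the upper
 * 32 bits of the immediate), which is why the helper above copies two insns
 * and returns 2; callers advance their index by the return value, e.g.
 *	i += __bpf_ld_imm64(&insn[i], R1, val);
 */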
    474
    475/*
    476 * Branch conversion tests. Complex operations can expand to a lot
    477 * of instructions when JITed. This in turn may cause jump offsets
    478 * to overflow the field size of the native instruction, triggering
    479 * a branch conversion mechanism in some JITs.
    480 */
    481static int __bpf_fill_max_jmp(struct bpf_test *self, int jmp, int imm)
    482{
    483	struct bpf_insn *insns;
    484	int len = S16_MAX + 5;
    485	int i;
    486
    487	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
    488	if (!insns)
    489		return -ENOMEM;
    490
    491	i = __bpf_ld_imm64(insns, R1, 0x0123456789abcdefULL);
    492	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
    493	insns[i++] = BPF_JMP_IMM(jmp, R0, imm, S16_MAX);
    494	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 2);
    495	insns[i++] = BPF_EXIT_INSN();
    496
    497	while (i < len - 1) {
    498		static const int ops[] = {
    499			BPF_LSH, BPF_RSH, BPF_ARSH, BPF_ADD,
    500			BPF_SUB, BPF_MUL, BPF_DIV, BPF_MOD,
    501		};
    502		int op = ops[(i >> 1) % ARRAY_SIZE(ops)];
    503
    504		if (i & 1)
    505			insns[i++] = BPF_ALU32_REG(op, R0, R1);
    506		else
    507			insns[i++] = BPF_ALU64_REG(op, R0, R1);
    508	}
    509
    510	insns[i++] = BPF_EXIT_INSN();
    511	self->u.ptr.insns = insns;
    512	self->u.ptr.len = len;
    513	BUG_ON(i != len);
    514
    515	return 0;
    516}
    517
    518/* Branch taken by runtime decision */
    519static int bpf_fill_max_jmp_taken(struct bpf_test *self)
    520{
    521	return __bpf_fill_max_jmp(self, BPF_JEQ, 1);
    522}
    523
    524/* Branch not taken by runtime decision */
    525static int bpf_fill_max_jmp_not_taken(struct bpf_test *self)
    526{
    527	return __bpf_fill_max_jmp(self, BPF_JEQ, 0);
    528}
    529
    530/* Branch always taken, known at JIT time */
    531static int bpf_fill_max_jmp_always_taken(struct bpf_test *self)
    532{
    533	return __bpf_fill_max_jmp(self, BPF_JGE, 0);
    534}
    535
    536/* Branch never taken, known at JIT time */
    537static int bpf_fill_max_jmp_never_taken(struct bpf_test *self)
    538{
    539	return __bpf_fill_max_jmp(self, BPF_JLT, 0);
    540}
    541
    542/* ALU result computation used in tests */
    543static bool __bpf_alu_result(u64 *res, u64 v1, u64 v2, u8 op)
    544{
    545	*res = 0;
    546	switch (op) {
    547	case BPF_MOV:
    548		*res = v2;
    549		break;
    550	case BPF_AND:
    551		*res = v1 & v2;
    552		break;
    553	case BPF_OR:
    554		*res = v1 | v2;
    555		break;
    556	case BPF_XOR:
    557		*res = v1 ^ v2;
    558		break;
    559	case BPF_LSH:
    560		*res = v1 << v2;
    561		break;
    562	case BPF_RSH:
    563		*res = v1 >> v2;
    564		break;
    565	case BPF_ARSH:
    566		*res = v1 >> v2;
    567		if (v2 > 0 && v1 > S64_MAX)
    568			*res |= ~0ULL << (64 - v2);
    569		break;
    570	case BPF_ADD:
    571		*res = v1 + v2;
    572		break;
    573	case BPF_SUB:
    574		*res = v1 - v2;
    575		break;
    576	case BPF_MUL:
    577		*res = v1 * v2;
    578		break;
    579	case BPF_DIV:
    580		if (v2 == 0)
    581			return false;
    582		*res = div64_u64(v1, v2);
    583		break;
    584	case BPF_MOD:
    585		if (v2 == 0)
    586			return false;
    587		div64_u64_rem(v1, v2, res);
    588		break;
    589	}
    590	return true;
    591}
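/*
 * Worked example for the BPF_ARSH case above: with v1 = 0x8000000000000000
 * and v2 = 1, the logical shift yields 0x4000000000000000; since v1 > S64_MAX
 * the mask ~0ULL << 63 is OR'ed in, giving 0xc000000000000000, i.e. the
 * arithmetic right shift of the negative 64-bit value.
 */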
    592
    593/* Test an ALU shift operation for all valid shift values */
    594static int __bpf_fill_alu_shift(struct bpf_test *self, u8 op,
    595				u8 mode, bool alu32)
    596{
    597	static const s64 regs[] = {
    598		0x0123456789abcdefLL, /* dword > 0, word < 0 */
     599		0xfedcba9876543210LL, /* dword < 0, word > 0 */
     600		0xfedcba0198765432LL, /* dword < 0, word < 0 */
    601		0x0123458967abcdefLL, /* dword > 0, word > 0 */
    602	};
    603	int bits = alu32 ? 32 : 64;
    604	int len = (2 + 7 * bits) * ARRAY_SIZE(regs) + 3;
    605	struct bpf_insn *insn;
    606	int imm, k;
    607	int i = 0;
    608
    609	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
    610	if (!insn)
    611		return -ENOMEM;
    612
    613	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
    614
    615	for (k = 0; k < ARRAY_SIZE(regs); k++) {
    616		s64 reg = regs[k];
    617
    618		i += __bpf_ld_imm64(&insn[i], R3, reg);
    619
    620		for (imm = 0; imm < bits; imm++) {
    621			u64 val;
    622
    623			/* Perform operation */
    624			insn[i++] = BPF_ALU64_REG(BPF_MOV, R1, R3);
    625			insn[i++] = BPF_ALU64_IMM(BPF_MOV, R2, imm);
    626			if (alu32) {
    627				if (mode == BPF_K)
    628					insn[i++] = BPF_ALU32_IMM(op, R1, imm);
    629				else
    630					insn[i++] = BPF_ALU32_REG(op, R1, R2);
    631
    632				if (op == BPF_ARSH)
    633					reg = (s32)reg;
    634				else
    635					reg = (u32)reg;
    636				__bpf_alu_result(&val, reg, imm, op);
    637				val = (u32)val;
    638			} else {
    639				if (mode == BPF_K)
    640					insn[i++] = BPF_ALU64_IMM(op, R1, imm);
    641				else
    642					insn[i++] = BPF_ALU64_REG(op, R1, R2);
    643				__bpf_alu_result(&val, reg, imm, op);
    644			}
    645
    646			/*
    647			 * When debugging a JIT that fails this test, one
    648			 * can write the immediate value to R0 here to find
    649			 * out which operand values that fail.
    650			 */
    651
    652			/* Load reference and check the result */
    653			i += __bpf_ld_imm64(&insn[i], R4, val);
    654			insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R4, 1);
    655			insn[i++] = BPF_EXIT_INSN();
    656		}
    657	}
    658
    659	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
    660	insn[i++] = BPF_EXIT_INSN();
    661
    662	self->u.ptr.insns = insn;
    663	self->u.ptr.len = len;
    664	BUG_ON(i != len);
    665
    666	return 0;
    667}
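/*
 * The buffer length above follows from the emitted code: each test register
 * needs 2 insns for the 64-bit load plus 7 insns per shift amount (2 moves,
 * the shift, a 2-insn reference load, the compare and the exit), with 1 head
 * and 2 tail insns, so the 64-bit variants use (2 + 7 * 64) * 4 + 3 = 1803
 * instructions.
 */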
    668
    669static int bpf_fill_alu64_lsh_imm(struct bpf_test *self)
    670{
    671	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_K, false);
    672}
    673
    674static int bpf_fill_alu64_rsh_imm(struct bpf_test *self)
    675{
    676	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_K, false);
    677}
    678
    679static int bpf_fill_alu64_arsh_imm(struct bpf_test *self)
    680{
    681	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_K, false);
    682}
    683
    684static int bpf_fill_alu64_lsh_reg(struct bpf_test *self)
    685{
    686	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_X, false);
    687}
    688
    689static int bpf_fill_alu64_rsh_reg(struct bpf_test *self)
    690{
    691	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_X, false);
    692}
    693
    694static int bpf_fill_alu64_arsh_reg(struct bpf_test *self)
    695{
    696	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_X, false);
    697}
    698
    699static int bpf_fill_alu32_lsh_imm(struct bpf_test *self)
    700{
    701	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_K, true);
    702}
    703
    704static int bpf_fill_alu32_rsh_imm(struct bpf_test *self)
    705{
    706	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_K, true);
    707}
    708
    709static int bpf_fill_alu32_arsh_imm(struct bpf_test *self)
    710{
    711	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_K, true);
    712}
    713
    714static int bpf_fill_alu32_lsh_reg(struct bpf_test *self)
    715{
    716	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_X, true);
    717}
    718
    719static int bpf_fill_alu32_rsh_reg(struct bpf_test *self)
    720{
    721	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_X, true);
    722}
    723
    724static int bpf_fill_alu32_arsh_reg(struct bpf_test *self)
    725{
    726	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_X, true);
    727}
    728
    729/*
    730 * Test an ALU register shift operation for all valid shift values
    731 * for the case when the source and destination are the same.
    732 */
    733static int __bpf_fill_alu_shift_same_reg(struct bpf_test *self, u8 op,
    734					 bool alu32)
    735{
    736	int bits = alu32 ? 32 : 64;
    737	int len = 3 + 6 * bits;
    738	struct bpf_insn *insn;
    739	int i = 0;
    740	u64 val;
    741
    742	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
    743	if (!insn)
    744		return -ENOMEM;
    745
    746	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
    747
    748	for (val = 0; val < bits; val++) {
    749		u64 res;
    750
    751		/* Perform operation */
    752		insn[i++] = BPF_ALU64_IMM(BPF_MOV, R1, val);
    753		if (alu32)
    754			insn[i++] = BPF_ALU32_REG(op, R1, R1);
    755		else
    756			insn[i++] = BPF_ALU64_REG(op, R1, R1);
    757
    758		/* Compute the reference result */
    759		__bpf_alu_result(&res, val, val, op);
    760		if (alu32)
    761			res = (u32)res;
    762		i += __bpf_ld_imm64(&insn[i], R2, res);
    763
    764		/* Check the actual result */
    765		insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
    766		insn[i++] = BPF_EXIT_INSN();
    767	}
    768
    769	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
    770	insn[i++] = BPF_EXIT_INSN();
    771
    772	self->u.ptr.insns = insn;
    773	self->u.ptr.len = len;
    774	BUG_ON(i != len);
    775
    776	return 0;
    777}
    778
    779static int bpf_fill_alu64_lsh_same_reg(struct bpf_test *self)
    780{
    781	return __bpf_fill_alu_shift_same_reg(self, BPF_LSH, false);
    782}
    783
    784static int bpf_fill_alu64_rsh_same_reg(struct bpf_test *self)
    785{
    786	return __bpf_fill_alu_shift_same_reg(self, BPF_RSH, false);
    787}
    788
    789static int bpf_fill_alu64_arsh_same_reg(struct bpf_test *self)
    790{
    791	return __bpf_fill_alu_shift_same_reg(self, BPF_ARSH, false);
    792}
    793
    794static int bpf_fill_alu32_lsh_same_reg(struct bpf_test *self)
    795{
    796	return __bpf_fill_alu_shift_same_reg(self, BPF_LSH, true);
    797}
    798
    799static int bpf_fill_alu32_rsh_same_reg(struct bpf_test *self)
    800{
    801	return __bpf_fill_alu_shift_same_reg(self, BPF_RSH, true);
    802}
    803
    804static int bpf_fill_alu32_arsh_same_reg(struct bpf_test *self)
    805{
    806	return __bpf_fill_alu_shift_same_reg(self, BPF_ARSH, true);
    807}
    808
    809/*
    810 * Common operand pattern generator for exhaustive power-of-two magnitudes
    811 * tests. The block size parameters can be adjusted to increase/reduce the
     812 * number of combinations tested and thereby execution speed and memory
    813 * footprint.
    814 */
    815
    816static inline s64 value(int msb, int delta, int sign)
    817{
    818	return sign * (1LL << msb) + delta;
    819}
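/*
 * For example, value(3, 1, 1) == 9 and value(7, -1, -1) == -129, i.e. a
 * power-of-two magnitude nudged by a small delta and given a sign.
 */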
    820
    821static int __bpf_fill_pattern(struct bpf_test *self, void *arg,
    822			      int dbits, int sbits, int block1, int block2,
    823			      int (*emit)(struct bpf_test*, void*,
    824					  struct bpf_insn*, s64, s64))
    825{
    826	static const int sgn[][2] = {{1, 1}, {1, -1}, {-1, 1}, {-1, -1}};
    827	struct bpf_insn *insns;
    828	int di, si, bt, db, sb;
    829	int count, len, k;
    830	int extra = 1 + 2;
    831	int i = 0;
    832
     833	/* Total number of iterations for the two patterns */
    834	count = (dbits - 1) * (sbits - 1) * block1 * block1 * ARRAY_SIZE(sgn);
    835	count += (max(dbits, sbits) - 1) * block2 * block2 * ARRAY_SIZE(sgn);
    836
    837	/* Compute the maximum number of insns and allocate the buffer */
    838	len = extra + count * (*emit)(self, arg, NULL, 0, 0);
    839	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
    840	if (!insns)
    841		return -ENOMEM;
    842
    843	/* Add head instruction(s) */
    844	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
    845
    846	/*
    847	 * Pattern 1: all combinations of power-of-two magnitudes and sign,
    848	 * and with a block of contiguous values around each magnitude.
    849	 */
    850	for (di = 0; di < dbits - 1; di++)                 /* Dst magnitudes */
    851		for (si = 0; si < sbits - 1; si++)         /* Src magnitudes */
    852			for (k = 0; k < ARRAY_SIZE(sgn); k++) /* Sign combos */
    853				for (db = -(block1 / 2);
    854				     db < (block1 + 1) / 2; db++)
    855					for (sb = -(block1 / 2);
    856					     sb < (block1 + 1) / 2; sb++) {
    857						s64 dst, src;
    858
    859						dst = value(di, db, sgn[k][0]);
    860						src = value(si, sb, sgn[k][1]);
    861						i += (*emit)(self, arg,
    862							     &insns[i],
    863							     dst, src);
    864					}
    865	/*
    866	 * Pattern 2: all combinations for a larger block of values
    867	 * for each power-of-two magnitude and sign, where the magnitude is
    868	 * the same for both operands.
    869	 */
    870	for (bt = 0; bt < max(dbits, sbits) - 1; bt++)        /* Magnitude   */
    871		for (k = 0; k < ARRAY_SIZE(sgn); k++)         /* Sign combos */
    872			for (db = -(block2 / 2); db < (block2 + 1) / 2; db++)
    873				for (sb = -(block2 / 2);
    874				     sb < (block2 + 1) / 2; sb++) {
    875					s64 dst, src;
    876
    877					dst = value(bt % dbits, db, sgn[k][0]);
    878					src = value(bt % sbits, sb, sgn[k][1]);
    879					i += (*emit)(self, arg, &insns[i],
    880						     dst, src);
    881				}
    882
    883	/* Append tail instructions */
    884	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
    885	insns[i++] = BPF_EXIT_INSN();
    886	BUG_ON(i > len);
    887
    888	self->u.ptr.insns = insns;
    889	self->u.ptr.len = i;
    890
    891	return 0;
    892}
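/*
 * With the default PATTERN_BLOCK1 = 1 and PATTERN_BLOCK2 = 5 defined below,
 * an immediate-operand fill (dbits = 64, sbits = 32) iterates
 * 63 * 31 * 1 * 1 * 4 + 63 * 5 * 5 * 4 = 7812 + 6300 = 14112 operand pairs,
 * which is the count used to size the instruction buffer above.
 */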
    893
    894/*
     895 * Block size parameters used in pattern tests below. Tune as needed to
     896 * increase/reduce the number of combinations tested, see following examples.
    897 *        block   values per operand MSB
    898 * ----------------------------------------
    899 *           0     none
    900 *           1     (1 << MSB)
    901 *           2     (1 << MSB) + [-1, 0]
    902 *           3     (1 << MSB) + [-1, 0, 1]
    903 */
    904#define PATTERN_BLOCK1 1
    905#define PATTERN_BLOCK2 5
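/*
 * Continuing the table above, the default PATTERN_BLOCK2 of 5 tests each
 * power-of-two magnitude with the contiguous values (1 << MSB) + [-2..2].
 */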
    906
    907/* Number of test runs for a pattern test */
    908#define NR_PATTERN_RUNS 1
    909
    910/*
    911 * Exhaustive tests of ALU operations for all combinations of power-of-two
    912 * magnitudes of the operands, both for positive and negative values. The
    913 * test is designed to verify e.g. the ALU and ALU64 operations for JITs that
    914 * emit different code depending on the magnitude of the immediate value.
    915 */
    916static int __bpf_emit_alu64_imm(struct bpf_test *self, void *arg,
    917				struct bpf_insn *insns, s64 dst, s64 imm)
    918{
    919	int op = *(int *)arg;
    920	int i = 0;
    921	u64 res;
    922
    923	if (!insns)
    924		return 7;
    925
    926	if (__bpf_alu_result(&res, dst, (s32)imm, op)) {
    927		i += __bpf_ld_imm64(&insns[i], R1, dst);
    928		i += __bpf_ld_imm64(&insns[i], R3, res);
    929		insns[i++] = BPF_ALU64_IMM(op, R1, imm);
    930		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
    931		insns[i++] = BPF_EXIT_INSN();
    932	}
    933
    934	return i;
    935}
    936
    937static int __bpf_emit_alu32_imm(struct bpf_test *self, void *arg,
    938				struct bpf_insn *insns, s64 dst, s64 imm)
    939{
    940	int op = *(int *)arg;
    941	int i = 0;
    942	u64 res;
    943
    944	if (!insns)
    945		return 7;
    946
    947	if (__bpf_alu_result(&res, (u32)dst, (u32)imm, op)) {
    948		i += __bpf_ld_imm64(&insns[i], R1, dst);
    949		i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
    950		insns[i++] = BPF_ALU32_IMM(op, R1, imm);
    951		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
    952		insns[i++] = BPF_EXIT_INSN();
    953	}
    954
    955	return i;
    956}
    957
    958static int __bpf_emit_alu64_reg(struct bpf_test *self, void *arg,
    959				struct bpf_insn *insns, s64 dst, s64 src)
    960{
    961	int op = *(int *)arg;
    962	int i = 0;
    963	u64 res;
    964
    965	if (!insns)
    966		return 9;
    967
    968	if (__bpf_alu_result(&res, dst, src, op)) {
    969		i += __bpf_ld_imm64(&insns[i], R1, dst);
    970		i += __bpf_ld_imm64(&insns[i], R2, src);
    971		i += __bpf_ld_imm64(&insns[i], R3, res);
    972		insns[i++] = BPF_ALU64_REG(op, R1, R2);
    973		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
    974		insns[i++] = BPF_EXIT_INSN();
    975	}
    976
    977	return i;
    978}
    979
    980static int __bpf_emit_alu32_reg(struct bpf_test *self, void *arg,
    981				struct bpf_insn *insns, s64 dst, s64 src)
    982{
    983	int op = *(int *)arg;
    984	int i = 0;
    985	u64 res;
    986
    987	if (!insns)
    988		return 9;
    989
    990	if (__bpf_alu_result(&res, (u32)dst, (u32)src, op)) {
    991		i += __bpf_ld_imm64(&insns[i], R1, dst);
    992		i += __bpf_ld_imm64(&insns[i], R2, src);
    993		i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
    994		insns[i++] = BPF_ALU32_REG(op, R1, R2);
    995		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
    996		insns[i++] = BPF_EXIT_INSN();
    997	}
    998
    999	return i;
   1000}
   1001
   1002static int __bpf_fill_alu64_imm(struct bpf_test *self, int op)
   1003{
   1004	return __bpf_fill_pattern(self, &op, 64, 32,
   1005				  PATTERN_BLOCK1, PATTERN_BLOCK2,
   1006				  &__bpf_emit_alu64_imm);
   1007}
   1008
   1009static int __bpf_fill_alu32_imm(struct bpf_test *self, int op)
   1010{
   1011	return __bpf_fill_pattern(self, &op, 64, 32,
   1012				  PATTERN_BLOCK1, PATTERN_BLOCK2,
   1013				  &__bpf_emit_alu32_imm);
   1014}
   1015
   1016static int __bpf_fill_alu64_reg(struct bpf_test *self, int op)
   1017{
   1018	return __bpf_fill_pattern(self, &op, 64, 64,
   1019				  PATTERN_BLOCK1, PATTERN_BLOCK2,
   1020				  &__bpf_emit_alu64_reg);
   1021}
   1022
   1023static int __bpf_fill_alu32_reg(struct bpf_test *self, int op)
   1024{
   1025	return __bpf_fill_pattern(self, &op, 64, 64,
   1026				  PATTERN_BLOCK1, PATTERN_BLOCK2,
   1027				  &__bpf_emit_alu32_reg);
   1028}
   1029
   1030/* ALU64 immediate operations */
   1031static int bpf_fill_alu64_mov_imm(struct bpf_test *self)
   1032{
   1033	return __bpf_fill_alu64_imm(self, BPF_MOV);
   1034}
   1035
   1036static int bpf_fill_alu64_and_imm(struct bpf_test *self)
   1037{
   1038	return __bpf_fill_alu64_imm(self, BPF_AND);
   1039}
   1040
   1041static int bpf_fill_alu64_or_imm(struct bpf_test *self)
   1042{
   1043	return __bpf_fill_alu64_imm(self, BPF_OR);
   1044}
   1045
   1046static int bpf_fill_alu64_xor_imm(struct bpf_test *self)
   1047{
   1048	return __bpf_fill_alu64_imm(self, BPF_XOR);
   1049}
   1050
   1051static int bpf_fill_alu64_add_imm(struct bpf_test *self)
   1052{
   1053	return __bpf_fill_alu64_imm(self, BPF_ADD);
   1054}
   1055
   1056static int bpf_fill_alu64_sub_imm(struct bpf_test *self)
   1057{
   1058	return __bpf_fill_alu64_imm(self, BPF_SUB);
   1059}
   1060
   1061static int bpf_fill_alu64_mul_imm(struct bpf_test *self)
   1062{
   1063	return __bpf_fill_alu64_imm(self, BPF_MUL);
   1064}
   1065
   1066static int bpf_fill_alu64_div_imm(struct bpf_test *self)
   1067{
   1068	return __bpf_fill_alu64_imm(self, BPF_DIV);
   1069}
   1070
   1071static int bpf_fill_alu64_mod_imm(struct bpf_test *self)
   1072{
   1073	return __bpf_fill_alu64_imm(self, BPF_MOD);
   1074}
   1075
   1076/* ALU32 immediate operations */
   1077static int bpf_fill_alu32_mov_imm(struct bpf_test *self)
   1078{
   1079	return __bpf_fill_alu32_imm(self, BPF_MOV);
   1080}
   1081
   1082static int bpf_fill_alu32_and_imm(struct bpf_test *self)
   1083{
   1084	return __bpf_fill_alu32_imm(self, BPF_AND);
   1085}
   1086
   1087static int bpf_fill_alu32_or_imm(struct bpf_test *self)
   1088{
   1089	return __bpf_fill_alu32_imm(self, BPF_OR);
   1090}
   1091
   1092static int bpf_fill_alu32_xor_imm(struct bpf_test *self)
   1093{
   1094	return __bpf_fill_alu32_imm(self, BPF_XOR);
   1095}
   1096
   1097static int bpf_fill_alu32_add_imm(struct bpf_test *self)
   1098{
   1099	return __bpf_fill_alu32_imm(self, BPF_ADD);
   1100}
   1101
   1102static int bpf_fill_alu32_sub_imm(struct bpf_test *self)
   1103{
   1104	return __bpf_fill_alu32_imm(self, BPF_SUB);
   1105}
   1106
   1107static int bpf_fill_alu32_mul_imm(struct bpf_test *self)
   1108{
   1109	return __bpf_fill_alu32_imm(self, BPF_MUL);
   1110}
   1111
   1112static int bpf_fill_alu32_div_imm(struct bpf_test *self)
   1113{
   1114	return __bpf_fill_alu32_imm(self, BPF_DIV);
   1115}
   1116
   1117static int bpf_fill_alu32_mod_imm(struct bpf_test *self)
   1118{
   1119	return __bpf_fill_alu32_imm(self, BPF_MOD);
   1120}
   1121
   1122/* ALU64 register operations */
   1123static int bpf_fill_alu64_mov_reg(struct bpf_test *self)
   1124{
   1125	return __bpf_fill_alu64_reg(self, BPF_MOV);
   1126}
   1127
   1128static int bpf_fill_alu64_and_reg(struct bpf_test *self)
   1129{
   1130	return __bpf_fill_alu64_reg(self, BPF_AND);
   1131}
   1132
   1133static int bpf_fill_alu64_or_reg(struct bpf_test *self)
   1134{
   1135	return __bpf_fill_alu64_reg(self, BPF_OR);
   1136}
   1137
   1138static int bpf_fill_alu64_xor_reg(struct bpf_test *self)
   1139{
   1140	return __bpf_fill_alu64_reg(self, BPF_XOR);
   1141}
   1142
   1143static int bpf_fill_alu64_add_reg(struct bpf_test *self)
   1144{
   1145	return __bpf_fill_alu64_reg(self, BPF_ADD);
   1146}
   1147
   1148static int bpf_fill_alu64_sub_reg(struct bpf_test *self)
   1149{
   1150	return __bpf_fill_alu64_reg(self, BPF_SUB);
   1151}
   1152
   1153static int bpf_fill_alu64_mul_reg(struct bpf_test *self)
   1154{
   1155	return __bpf_fill_alu64_reg(self, BPF_MUL);
   1156}
   1157
   1158static int bpf_fill_alu64_div_reg(struct bpf_test *self)
   1159{
   1160	return __bpf_fill_alu64_reg(self, BPF_DIV);
   1161}
   1162
   1163static int bpf_fill_alu64_mod_reg(struct bpf_test *self)
   1164{
   1165	return __bpf_fill_alu64_reg(self, BPF_MOD);
   1166}
   1167
   1168/* ALU32 register operations */
   1169static int bpf_fill_alu32_mov_reg(struct bpf_test *self)
   1170{
   1171	return __bpf_fill_alu32_reg(self, BPF_MOV);
   1172}
   1173
   1174static int bpf_fill_alu32_and_reg(struct bpf_test *self)
   1175{
   1176	return __bpf_fill_alu32_reg(self, BPF_AND);
   1177}
   1178
   1179static int bpf_fill_alu32_or_reg(struct bpf_test *self)
   1180{
   1181	return __bpf_fill_alu32_reg(self, BPF_OR);
   1182}
   1183
   1184static int bpf_fill_alu32_xor_reg(struct bpf_test *self)
   1185{
   1186	return __bpf_fill_alu32_reg(self, BPF_XOR);
   1187}
   1188
   1189static int bpf_fill_alu32_add_reg(struct bpf_test *self)
   1190{
   1191	return __bpf_fill_alu32_reg(self, BPF_ADD);
   1192}
   1193
   1194static int bpf_fill_alu32_sub_reg(struct bpf_test *self)
   1195{
   1196	return __bpf_fill_alu32_reg(self, BPF_SUB);
   1197}
   1198
   1199static int bpf_fill_alu32_mul_reg(struct bpf_test *self)
   1200{
   1201	return __bpf_fill_alu32_reg(self, BPF_MUL);
   1202}
   1203
   1204static int bpf_fill_alu32_div_reg(struct bpf_test *self)
   1205{
   1206	return __bpf_fill_alu32_reg(self, BPF_DIV);
   1207}
   1208
   1209static int bpf_fill_alu32_mod_reg(struct bpf_test *self)
   1210{
   1211	return __bpf_fill_alu32_reg(self, BPF_MOD);
   1212}
   1213
   1214/*
   1215 * Test JITs that implement complex ALU operations as function
   1216 * calls, and must re-arrange operands for argument passing.
   1217 */
   1218static int __bpf_fill_alu_imm_regs(struct bpf_test *self, u8 op, bool alu32)
   1219{
   1220	int len = 2 + 10 * 10;
   1221	struct bpf_insn *insns;
   1222	u64 dst, res;
   1223	int i = 0;
   1224	u32 imm;
   1225	int rd;
   1226
   1227	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
   1228	if (!insns)
   1229		return -ENOMEM;
   1230
   1231	/* Operand and result values according to operation */
   1232	if (alu32)
   1233		dst = 0x76543210U;
   1234	else
   1235		dst = 0x7edcba9876543210ULL;
   1236	imm = 0x01234567U;
   1237
   1238	if (op == BPF_LSH || op == BPF_RSH || op == BPF_ARSH)
   1239		imm &= 31;
   1240
   1241	__bpf_alu_result(&res, dst, imm, op);
   1242
   1243	if (alu32)
   1244		res = (u32)res;
   1245
   1246	/* Check all operand registers */
   1247	for (rd = R0; rd <= R9; rd++) {
   1248		i += __bpf_ld_imm64(&insns[i], rd, dst);
   1249
   1250		if (alu32)
   1251			insns[i++] = BPF_ALU32_IMM(op, rd, imm);
   1252		else
   1253			insns[i++] = BPF_ALU64_IMM(op, rd, imm);
   1254
   1255		insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, res, 2);
   1256		insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
   1257		insns[i++] = BPF_EXIT_INSN();
   1258
   1259		insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32);
   1260		insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, res >> 32, 2);
   1261		insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
   1262		insns[i++] = BPF_EXIT_INSN();
   1263	}
   1264
   1265	insns[i++] = BPF_MOV64_IMM(R0, 1);
   1266	insns[i++] = BPF_EXIT_INSN();
   1267
   1268	self->u.ptr.insns = insns;
   1269	self->u.ptr.len = len;
   1270	BUG_ON(i != len);
   1271
   1272	return 0;
   1273}
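/*
 * The len = 2 + 10 * 10 above corresponds to 10 emitted insns per
 * destination register (the 2-insn 64-bit load, the ALU op, 3 insns checking
 * the low word and 4 insns shifting and checking the high word) for R0..R9,
 * plus the two tail instructions.
 */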
   1274
   1275/* ALU64 K registers */
   1276static int bpf_fill_alu64_mov_imm_regs(struct bpf_test *self)
   1277{
   1278	return __bpf_fill_alu_imm_regs(self, BPF_MOV, false);
   1279}
   1280
   1281static int bpf_fill_alu64_and_imm_regs(struct bpf_test *self)
   1282{
   1283	return __bpf_fill_alu_imm_regs(self, BPF_AND, false);
   1284}
   1285
   1286static int bpf_fill_alu64_or_imm_regs(struct bpf_test *self)
   1287{
   1288	return __bpf_fill_alu_imm_regs(self, BPF_OR, false);
   1289}
   1290
   1291static int bpf_fill_alu64_xor_imm_regs(struct bpf_test *self)
   1292{
   1293	return __bpf_fill_alu_imm_regs(self, BPF_XOR, false);
   1294}
   1295
   1296static int bpf_fill_alu64_lsh_imm_regs(struct bpf_test *self)
   1297{
   1298	return __bpf_fill_alu_imm_regs(self, BPF_LSH, false);
   1299}
   1300
   1301static int bpf_fill_alu64_rsh_imm_regs(struct bpf_test *self)
   1302{
   1303	return __bpf_fill_alu_imm_regs(self, BPF_RSH, false);
   1304}
   1305
   1306static int bpf_fill_alu64_arsh_imm_regs(struct bpf_test *self)
   1307{
   1308	return __bpf_fill_alu_imm_regs(self, BPF_ARSH, false);
   1309}
   1310
   1311static int bpf_fill_alu64_add_imm_regs(struct bpf_test *self)
   1312{
   1313	return __bpf_fill_alu_imm_regs(self, BPF_ADD, false);
   1314}
   1315
   1316static int bpf_fill_alu64_sub_imm_regs(struct bpf_test *self)
   1317{
   1318	return __bpf_fill_alu_imm_regs(self, BPF_SUB, false);
   1319}
   1320
   1321static int bpf_fill_alu64_mul_imm_regs(struct bpf_test *self)
   1322{
   1323	return __bpf_fill_alu_imm_regs(self, BPF_MUL, false);
   1324}
   1325
   1326static int bpf_fill_alu64_div_imm_regs(struct bpf_test *self)
   1327{
   1328	return __bpf_fill_alu_imm_regs(self, BPF_DIV, false);
   1329}
   1330
   1331static int bpf_fill_alu64_mod_imm_regs(struct bpf_test *self)
   1332{
   1333	return __bpf_fill_alu_imm_regs(self, BPF_MOD, false);
   1334}
   1335
   1336/* ALU32 K registers */
   1337static int bpf_fill_alu32_mov_imm_regs(struct bpf_test *self)
   1338{
   1339	return __bpf_fill_alu_imm_regs(self, BPF_MOV, true);
   1340}
   1341
   1342static int bpf_fill_alu32_and_imm_regs(struct bpf_test *self)
   1343{
   1344	return __bpf_fill_alu_imm_regs(self, BPF_AND, true);
   1345}
   1346
   1347static int bpf_fill_alu32_or_imm_regs(struct bpf_test *self)
   1348{
   1349	return __bpf_fill_alu_imm_regs(self, BPF_OR, true);
   1350}
   1351
   1352static int bpf_fill_alu32_xor_imm_regs(struct bpf_test *self)
   1353{
   1354	return __bpf_fill_alu_imm_regs(self, BPF_XOR, true);
   1355}
   1356
   1357static int bpf_fill_alu32_lsh_imm_regs(struct bpf_test *self)
   1358{
   1359	return __bpf_fill_alu_imm_regs(self, BPF_LSH, true);
   1360}
   1361
   1362static int bpf_fill_alu32_rsh_imm_regs(struct bpf_test *self)
   1363{
   1364	return __bpf_fill_alu_imm_regs(self, BPF_RSH, true);
   1365}
   1366
   1367static int bpf_fill_alu32_arsh_imm_regs(struct bpf_test *self)
   1368{
   1369	return __bpf_fill_alu_imm_regs(self, BPF_ARSH, true);
   1370}
   1371
   1372static int bpf_fill_alu32_add_imm_regs(struct bpf_test *self)
   1373{
   1374	return __bpf_fill_alu_imm_regs(self, BPF_ADD, true);
   1375}
   1376
   1377static int bpf_fill_alu32_sub_imm_regs(struct bpf_test *self)
   1378{
   1379	return __bpf_fill_alu_imm_regs(self, BPF_SUB, true);
   1380}
   1381
   1382static int bpf_fill_alu32_mul_imm_regs(struct bpf_test *self)
   1383{
   1384	return __bpf_fill_alu_imm_regs(self, BPF_MUL, true);
   1385}
   1386
   1387static int bpf_fill_alu32_div_imm_regs(struct bpf_test *self)
   1388{
   1389	return __bpf_fill_alu_imm_regs(self, BPF_DIV, true);
   1390}
   1391
   1392static int bpf_fill_alu32_mod_imm_regs(struct bpf_test *self)
   1393{
   1394	return __bpf_fill_alu_imm_regs(self, BPF_MOD, true);
   1395}
   1396
   1397/*
   1398 * Test JITs that implement complex ALU operations as function
   1399 * calls, and must re-arrange operands for argument passing.
   1400 */
   1401static int __bpf_fill_alu_reg_pairs(struct bpf_test *self, u8 op, bool alu32)
   1402{
   1403	int len = 2 + 10 * 10 * 12;
   1404	u64 dst, src, res, same;
   1405	struct bpf_insn *insns;
   1406	int rd, rs;
   1407	int i = 0;
   1408
   1409	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
   1410	if (!insns)
   1411		return -ENOMEM;
   1412
   1413	/* Operand and result values according to operation */
   1414	if (alu32) {
   1415		dst = 0x76543210U;
   1416		src = 0x01234567U;
   1417	} else {
   1418		dst = 0x7edcba9876543210ULL;
   1419		src = 0x0123456789abcdefULL;
   1420	}
   1421
   1422	if (op == BPF_LSH || op == BPF_RSH || op == BPF_ARSH)
   1423		src &= 31;
   1424
   1425	__bpf_alu_result(&res, dst, src, op);
   1426	__bpf_alu_result(&same, src, src, op);
   1427
   1428	if (alu32) {
   1429		res = (u32)res;
   1430		same = (u32)same;
   1431	}
   1432
   1433	/* Check all combinations of operand registers */
   1434	for (rd = R0; rd <= R9; rd++) {
   1435		for (rs = R0; rs <= R9; rs++) {
   1436			u64 val = rd == rs ? same : res;
   1437
   1438			i += __bpf_ld_imm64(&insns[i], rd, dst);
   1439			i += __bpf_ld_imm64(&insns[i], rs, src);
   1440
   1441			if (alu32)
   1442				insns[i++] = BPF_ALU32_REG(op, rd, rs);
   1443			else
   1444				insns[i++] = BPF_ALU64_REG(op, rd, rs);
   1445
   1446			insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val, 2);
   1447			insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
   1448			insns[i++] = BPF_EXIT_INSN();
   1449
   1450			insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32);
   1451			insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val >> 32, 2);
   1452			insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
   1453			insns[i++] = BPF_EXIT_INSN();
   1454		}
   1455	}
   1456
   1457	insns[i++] = BPF_MOV64_IMM(R0, 1);
   1458	insns[i++] = BPF_EXIT_INSN();
   1459
   1460	self->u.ptr.insns = insns;
   1461	self->u.ptr.len = len;
   1462	BUG_ON(i != len);
   1463
   1464	return 0;
   1465}
   1466
   1467/* ALU64 X register combinations */
   1468static int bpf_fill_alu64_mov_reg_pairs(struct bpf_test *self)
   1469{
   1470	return __bpf_fill_alu_reg_pairs(self, BPF_MOV, false);
   1471}
   1472
   1473static int bpf_fill_alu64_and_reg_pairs(struct bpf_test *self)
   1474{
   1475	return __bpf_fill_alu_reg_pairs(self, BPF_AND, false);
   1476}
   1477
   1478static int bpf_fill_alu64_or_reg_pairs(struct bpf_test *self)
   1479{
   1480	return __bpf_fill_alu_reg_pairs(self, BPF_OR, false);
   1481}
   1482
   1483static int bpf_fill_alu64_xor_reg_pairs(struct bpf_test *self)
   1484{
   1485	return __bpf_fill_alu_reg_pairs(self, BPF_XOR, false);
   1486}
   1487
   1488static int bpf_fill_alu64_lsh_reg_pairs(struct bpf_test *self)
   1489{
   1490	return __bpf_fill_alu_reg_pairs(self, BPF_LSH, false);
   1491}
   1492
   1493static int bpf_fill_alu64_rsh_reg_pairs(struct bpf_test *self)
   1494{
   1495	return __bpf_fill_alu_reg_pairs(self, BPF_RSH, false);
   1496}
   1497
   1498static int bpf_fill_alu64_arsh_reg_pairs(struct bpf_test *self)
   1499{
   1500	return __bpf_fill_alu_reg_pairs(self, BPF_ARSH, false);
   1501}
   1502
   1503static int bpf_fill_alu64_add_reg_pairs(struct bpf_test *self)
   1504{
   1505	return __bpf_fill_alu_reg_pairs(self, BPF_ADD, false);
   1506}
   1507
   1508static int bpf_fill_alu64_sub_reg_pairs(struct bpf_test *self)
   1509{
   1510	return __bpf_fill_alu_reg_pairs(self, BPF_SUB, false);
   1511}
   1512
   1513static int bpf_fill_alu64_mul_reg_pairs(struct bpf_test *self)
   1514{
   1515	return __bpf_fill_alu_reg_pairs(self, BPF_MUL, false);
   1516}
   1517
   1518static int bpf_fill_alu64_div_reg_pairs(struct bpf_test *self)
   1519{
   1520	return __bpf_fill_alu_reg_pairs(self, BPF_DIV, false);
   1521}
   1522
   1523static int bpf_fill_alu64_mod_reg_pairs(struct bpf_test *self)
   1524{
   1525	return __bpf_fill_alu_reg_pairs(self, BPF_MOD, false);
   1526}
   1527
   1528/* ALU32 X register combinations */
   1529static int bpf_fill_alu32_mov_reg_pairs(struct bpf_test *self)
   1530{
   1531	return __bpf_fill_alu_reg_pairs(self, BPF_MOV, true);
   1532}
   1533
   1534static int bpf_fill_alu32_and_reg_pairs(struct bpf_test *self)
   1535{
   1536	return __bpf_fill_alu_reg_pairs(self, BPF_AND, true);
   1537}
   1538
   1539static int bpf_fill_alu32_or_reg_pairs(struct bpf_test *self)
   1540{
   1541	return __bpf_fill_alu_reg_pairs(self, BPF_OR, true);
   1542}
   1543
   1544static int bpf_fill_alu32_xor_reg_pairs(struct bpf_test *self)
   1545{
   1546	return __bpf_fill_alu_reg_pairs(self, BPF_XOR, true);
   1547}
   1548
   1549static int bpf_fill_alu32_lsh_reg_pairs(struct bpf_test *self)
   1550{
   1551	return __bpf_fill_alu_reg_pairs(self, BPF_LSH, true);
   1552}
   1553
   1554static int bpf_fill_alu32_rsh_reg_pairs(struct bpf_test *self)
   1555{
   1556	return __bpf_fill_alu_reg_pairs(self, BPF_RSH, true);
   1557}
   1558
   1559static int bpf_fill_alu32_arsh_reg_pairs(struct bpf_test *self)
   1560{
   1561	return __bpf_fill_alu_reg_pairs(self, BPF_ARSH, true);
   1562}
   1563
   1564static int bpf_fill_alu32_add_reg_pairs(struct bpf_test *self)
   1565{
   1566	return __bpf_fill_alu_reg_pairs(self, BPF_ADD, true);
   1567}
   1568
   1569static int bpf_fill_alu32_sub_reg_pairs(struct bpf_test *self)
   1570{
   1571	return __bpf_fill_alu_reg_pairs(self, BPF_SUB, true);
   1572}
   1573
   1574static int bpf_fill_alu32_mul_reg_pairs(struct bpf_test *self)
   1575{
   1576	return __bpf_fill_alu_reg_pairs(self, BPF_MUL, true);
   1577}
   1578
   1579static int bpf_fill_alu32_div_reg_pairs(struct bpf_test *self)
   1580{
   1581	return __bpf_fill_alu_reg_pairs(self, BPF_DIV, true);
   1582}
   1583
   1584static int bpf_fill_alu32_mod_reg_pairs(struct bpf_test *self)
   1585{
   1586	return __bpf_fill_alu_reg_pairs(self, BPF_MOD, true);
   1587}
   1588
   1589/*
   1590 * Exhaustive tests of atomic operations for all power-of-two operand
   1591 * magnitudes, both for positive and negative values.
   1592 */
   1593
   1594static int __bpf_emit_atomic64(struct bpf_test *self, void *arg,
   1595			       struct bpf_insn *insns, s64 dst, s64 src)
   1596{
   1597	int op = *(int *)arg;
   1598	u64 keep, fetch, res;
   1599	int i = 0;
   1600
   1601	if (!insns)
   1602		return 21;
   1603
   1604	switch (op) {
   1605	case BPF_XCHG:
   1606		res = src;
   1607		break;
   1608	default:
   1609		__bpf_alu_result(&res, dst, src, BPF_OP(op));
   1610	}
   1611
   1612	keep = 0x0123456789abcdefULL;
   1613	if (op & BPF_FETCH)
   1614		fetch = dst;
   1615	else
   1616		fetch = src;
   1617
   1618	i += __bpf_ld_imm64(&insns[i], R0, keep);
   1619	i += __bpf_ld_imm64(&insns[i], R1, dst);
   1620	i += __bpf_ld_imm64(&insns[i], R2, src);
   1621	i += __bpf_ld_imm64(&insns[i], R3, res);
   1622	i += __bpf_ld_imm64(&insns[i], R4, fetch);
   1623	i += __bpf_ld_imm64(&insns[i], R5, keep);
   1624
   1625	insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
   1626	insns[i++] = BPF_ATOMIC_OP(BPF_DW, op, R10, R2, -8);
   1627	insns[i++] = BPF_LDX_MEM(BPF_DW, R1, R10, -8);
   1628
   1629	insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
   1630	insns[i++] = BPF_EXIT_INSN();
   1631
   1632	insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1);
   1633	insns[i++] = BPF_EXIT_INSN();
   1634
   1635	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);
   1636	insns[i++] = BPF_EXIT_INSN();
   1637
   1638	return i;
   1639}
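/*
 * Worked example for the emitter above: for BPF_ADD | BPF_FETCH with dst = 5
 * and src = 3, the checks expect the reloaded memory word (R1) to equal
 * res = 8, the source register (R2) to hold the fetched old value 5, and R0
 * to still hold the 0x0123456789abcdef sentinel.
 */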
   1640
   1641static int __bpf_emit_atomic32(struct bpf_test *self, void *arg,
   1642			       struct bpf_insn *insns, s64 dst, s64 src)
   1643{
   1644	int op = *(int *)arg;
   1645	u64 keep, fetch, res;
   1646	int i = 0;
   1647
   1648	if (!insns)
   1649		return 21;
   1650
   1651	switch (op) {
   1652	case BPF_XCHG:
   1653		res = src;
   1654		break;
   1655	default:
   1656		__bpf_alu_result(&res, (u32)dst, (u32)src, BPF_OP(op));
   1657	}
   1658
   1659	keep = 0x0123456789abcdefULL;
   1660	if (op & BPF_FETCH)
   1661		fetch = (u32)dst;
   1662	else
   1663		fetch = src;
   1664
   1665	i += __bpf_ld_imm64(&insns[i], R0, keep);
   1666	i += __bpf_ld_imm64(&insns[i], R1, (u32)dst);
   1667	i += __bpf_ld_imm64(&insns[i], R2, src);
   1668	i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
   1669	i += __bpf_ld_imm64(&insns[i], R4, fetch);
   1670	i += __bpf_ld_imm64(&insns[i], R5, keep);
   1671
   1672	insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4);
   1673	insns[i++] = BPF_ATOMIC_OP(BPF_W, op, R10, R2, -4);
   1674	insns[i++] = BPF_LDX_MEM(BPF_W, R1, R10, -4);
   1675
   1676	insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
   1677	insns[i++] = BPF_EXIT_INSN();
   1678
   1679	insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1);
   1680	insns[i++] = BPF_EXIT_INSN();
   1681
   1682	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);
   1683	insns[i++] = BPF_EXIT_INSN();
   1684
   1685	return i;
   1686}
   1687
   1688static int __bpf_emit_cmpxchg64(struct bpf_test *self, void *arg,
   1689				struct bpf_insn *insns, s64 dst, s64 src)
   1690{
   1691	int i = 0;
   1692
   1693	if (!insns)
   1694		return 23;
   1695
   1696	i += __bpf_ld_imm64(&insns[i], R0, ~dst);
   1697	i += __bpf_ld_imm64(&insns[i], R1, dst);
   1698	i += __bpf_ld_imm64(&insns[i], R2, src);
   1699
   1700	/* Result unsuccessful */
   1701	insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
   1702	insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8);
   1703	insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8);
   1704
   1705	insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 2);
   1706	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
   1707	insns[i++] = BPF_EXIT_INSN();
   1708
   1709	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2);
   1710	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
   1711	insns[i++] = BPF_EXIT_INSN();
   1712
   1713	/* Result successful */
   1714	insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8);
   1715	insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8);
   1716
   1717	insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R3, 2);
   1718	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
   1719	insns[i++] = BPF_EXIT_INSN();
   1720
   1721	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
   1722	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
   1723	insns[i++] = BPF_EXIT_INSN();
   1724
   1725	return i;
   1726}
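/*
 * BPF_CMPXCHG compares R0 with the memory word: in the first attempt above
 * R0 holds ~dst, so the exchange fails, memory keeps dst and R0 is loaded
 * with that old value; R0 then matches the memory word, so the second
 * attempt succeeds and stores src, which is what the two groups of checks
 * verify.
 */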
   1727
   1728static int __bpf_emit_cmpxchg32(struct bpf_test *self, void *arg,
   1729				struct bpf_insn *insns, s64 dst, s64 src)
   1730{
   1731	int i = 0;
   1732
   1733	if (!insns)
   1734		return 27;
   1735
   1736	i += __bpf_ld_imm64(&insns[i], R0, ~dst);
   1737	i += __bpf_ld_imm64(&insns[i], R1, (u32)dst);
   1738	i += __bpf_ld_imm64(&insns[i], R2, src);
   1739
   1740	/* Result unsuccessful */
   1741	insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4);
   1742	insns[i++] = BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R2, -4);
    1743	insns[i++] = BPF_ZEXT_REG(R0); /* Zext always inserted by verifier */
   1744	insns[i++] = BPF_LDX_MEM(BPF_W, R3, R10, -4);
   1745
   1746	insns[i++] = BPF_JMP32_REG(BPF_JEQ, R1, R3, 2);
   1747	insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
   1748	insns[i++] = BPF_EXIT_INSN();
   1749
   1750	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2);
   1751	insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
   1752	insns[i++] = BPF_EXIT_INSN();
   1753
   1754	/* Result successful */
   1755	i += __bpf_ld_imm64(&insns[i], R0, dst);
   1756	insns[i++] = BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R2, -4);
    1757	insns[i++] = BPF_ZEXT_REG(R0); /* Zext always inserted by verifier */
   1758	insns[i++] = BPF_LDX_MEM(BPF_W, R3, R10, -4);
   1759
   1760	insns[i++] = BPF_JMP32_REG(BPF_JEQ, R2, R3, 2);
   1761	insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
   1762	insns[i++] = BPF_EXIT_INSN();
   1763
   1764	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
   1765	insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
   1766	insns[i++] = BPF_EXIT_INSN();
   1767
   1768	return i;
   1769}
   1770
   1771static int __bpf_fill_atomic64(struct bpf_test *self, int op)
   1772{
   1773	return __bpf_fill_pattern(self, &op, 64, 64,
   1774				  0, PATTERN_BLOCK2,
   1775				  &__bpf_emit_atomic64);
   1776}
   1777
   1778static int __bpf_fill_atomic32(struct bpf_test *self, int op)
   1779{
   1780	return __bpf_fill_pattern(self, &op, 64, 64,
   1781				  0, PATTERN_BLOCK2,
   1782				  &__bpf_emit_atomic32);
   1783}
   1784
   1785/* 64-bit atomic operations */
   1786static int bpf_fill_atomic64_add(struct bpf_test *self)
   1787{
   1788	return __bpf_fill_atomic64(self, BPF_ADD);
   1789}
   1790
   1791static int bpf_fill_atomic64_and(struct bpf_test *self)
   1792{
   1793	return __bpf_fill_atomic64(self, BPF_AND);
   1794}
   1795
   1796static int bpf_fill_atomic64_or(struct bpf_test *self)
   1797{
   1798	return __bpf_fill_atomic64(self, BPF_OR);
   1799}
   1800
   1801static int bpf_fill_atomic64_xor(struct bpf_test *self)
   1802{
   1803	return __bpf_fill_atomic64(self, BPF_XOR);
   1804}
   1805
   1806static int bpf_fill_atomic64_add_fetch(struct bpf_test *self)
   1807{
   1808	return __bpf_fill_atomic64(self, BPF_ADD | BPF_FETCH);
   1809}
   1810
   1811static int bpf_fill_atomic64_and_fetch(struct bpf_test *self)
   1812{
   1813	return __bpf_fill_atomic64(self, BPF_AND | BPF_FETCH);
   1814}
   1815
   1816static int bpf_fill_atomic64_or_fetch(struct bpf_test *self)
   1817{
   1818	return __bpf_fill_atomic64(self, BPF_OR | BPF_FETCH);
   1819}
   1820
   1821static int bpf_fill_atomic64_xor_fetch(struct bpf_test *self)
   1822{
   1823	return __bpf_fill_atomic64(self, BPF_XOR | BPF_FETCH);
   1824}
   1825
   1826static int bpf_fill_atomic64_xchg(struct bpf_test *self)
   1827{
   1828	return __bpf_fill_atomic64(self, BPF_XCHG);
   1829}
   1830
   1831static int bpf_fill_cmpxchg64(struct bpf_test *self)
   1832{
   1833	return __bpf_fill_pattern(self, NULL, 64, 64, 0, PATTERN_BLOCK2,
   1834				  &__bpf_emit_cmpxchg64);
   1835}
   1836
   1837/* 32-bit atomic operations */
   1838static int bpf_fill_atomic32_add(struct bpf_test *self)
   1839{
   1840	return __bpf_fill_atomic32(self, BPF_ADD);
   1841}
   1842
   1843static int bpf_fill_atomic32_and(struct bpf_test *self)
   1844{
   1845	return __bpf_fill_atomic32(self, BPF_AND);
   1846}
   1847
   1848static int bpf_fill_atomic32_or(struct bpf_test *self)
   1849{
   1850	return __bpf_fill_atomic32(self, BPF_OR);
   1851}
   1852
   1853static int bpf_fill_atomic32_xor(struct bpf_test *self)
   1854{
   1855	return __bpf_fill_atomic32(self, BPF_XOR);
   1856}
   1857
   1858static int bpf_fill_atomic32_add_fetch(struct bpf_test *self)
   1859{
   1860	return __bpf_fill_atomic32(self, BPF_ADD | BPF_FETCH);
   1861}
   1862
   1863static int bpf_fill_atomic32_and_fetch(struct bpf_test *self)
   1864{
   1865	return __bpf_fill_atomic32(self, BPF_AND | BPF_FETCH);
   1866}
   1867
   1868static int bpf_fill_atomic32_or_fetch(struct bpf_test *self)
   1869{
   1870	return __bpf_fill_atomic32(self, BPF_OR | BPF_FETCH);
   1871}
   1872
   1873static int bpf_fill_atomic32_xor_fetch(struct bpf_test *self)
   1874{
   1875	return __bpf_fill_atomic32(self, BPF_XOR | BPF_FETCH);
   1876}
   1877
   1878static int bpf_fill_atomic32_xchg(struct bpf_test *self)
   1879{
   1880	return __bpf_fill_atomic32(self, BPF_XCHG);
   1881}
   1882
   1883static int bpf_fill_cmpxchg32(struct bpf_test *self)
   1884{
   1885	return __bpf_fill_pattern(self, NULL, 64, 64, 0, PATTERN_BLOCK2,
   1886				  &__bpf_emit_cmpxchg32);
   1887}
   1888
   1889/*
   1890 * Test JITs that implement ATOMIC operations as function calls or
   1891 * other primitives, and must re-arrange operands for argument passing.
   1892 */
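/*
 * Roughly, for each (rd, rs) register pair the generator below emits a
 * sequence like the following (an illustrative sketch for rd=R3, rs=R5 and
 * a 64-bit BPF_ADD; the checks that follow it vary with register aliasing):
 *
 *	r0 = mem                     ; BPF_LD_IMM64
 *	*(u64 *)(r10 - 8) = r0       ; seed the stack slot
 *	r0 = ~mem                    ; canary, must survive the operation
 *	r5 = upd                     ; source operand
 *	r3 = r10                     ; destination base pointer
 *	lock *(u64 *)(r3 - 8) += r5  ; the atomic operation under test
 *	...                          ; verify r0, r5, r3 and the memory word
 */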
   1893static int __bpf_fill_atomic_reg_pairs(struct bpf_test *self, u8 width, u8 op)
   1894{
   1895	struct bpf_insn *insn;
   1896	int len = 2 + 34 * 10 * 10;
   1897	u64 mem, upd, res;
   1898	int rd, rs, i = 0;
   1899
   1900	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
   1901	if (!insn)
   1902		return -ENOMEM;
   1903
   1904	/* Operand and memory values */
   1905	if (width == BPF_DW) {
   1906		mem = 0x0123456789abcdefULL;
   1907		upd = 0xfedcba9876543210ULL;
   1908	} else { /* BPF_W */
   1909		mem = 0x01234567U;
   1910		upd = 0x76543210U;
   1911	}
   1912
   1913	/* Memory updated according to operation */
   1914	switch (op) {
   1915	case BPF_XCHG:
   1916		res = upd;
   1917		break;
   1918	case BPF_CMPXCHG:
   1919		res = mem;
   1920		break;
   1921	default:
   1922		__bpf_alu_result(&res, mem, upd, BPF_OP(op));
   1923	}
   1924
   1925	/* Test all operand registers */
   1926	for (rd = R0; rd <= R9; rd++) {
   1927		for (rs = R0; rs <= R9; rs++) {
   1928			u64 cmp, src;
   1929
   1930			/* Initialize value in memory */
   1931			i += __bpf_ld_imm64(&insn[i], R0, mem);
   1932			insn[i++] = BPF_STX_MEM(width, R10, R0, -8);
   1933
   1934			/* Initialize registers in order */
   1935			i += __bpf_ld_imm64(&insn[i], R0, ~mem);
   1936			i += __bpf_ld_imm64(&insn[i], rs, upd);
   1937			insn[i++] = BPF_MOV64_REG(rd, R10);
   1938
   1939			/* Perform atomic operation */
   1940			insn[i++] = BPF_ATOMIC_OP(width, op, rd, rs, -8);
   1941			if (op == BPF_CMPXCHG && width == BPF_W)
   1942				insn[i++] = BPF_ZEXT_REG(R0);
   1943
   1944			/* Check R0 register value */
   1945			if (op == BPF_CMPXCHG)
   1946				cmp = mem;  /* Expect value from memory */
   1947			else if (R0 == rd || R0 == rs)
   1948				cmp = 0;    /* Aliased, checked below */
   1949			else
   1950				cmp = ~mem; /* Expect value to be preserved */
   1951			if (cmp) {
   1952				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, R0,
   1953							   (u32)cmp, 2);
   1954				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
   1955				insn[i++] = BPF_EXIT_INSN();
   1956				insn[i++] = BPF_ALU64_IMM(BPF_RSH, R0, 32);
   1957				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, R0,
   1958							   cmp >> 32, 2);
   1959				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
   1960				insn[i++] = BPF_EXIT_INSN();
   1961			}
   1962
   1963			/* Check source register value */
   1964			if (rs == R0 && op == BPF_CMPXCHG)
   1965				src = 0;   /* Aliased with R0, checked above */
   1966			else if (rs == rd && (op == BPF_CMPXCHG ||
   1967					      !(op & BPF_FETCH)))
   1968				src = 0;   /* Aliased with rd, checked below */
   1969			else if (op == BPF_CMPXCHG)
   1970				src = upd; /* Expect value to be preserved */
   1971			else if (op & BPF_FETCH)
   1972				src = mem; /* Expect fetched value from mem */
   1973			else /* no fetch */
   1974				src = upd; /* Expect value to be preserved */
   1975			if (src) {
   1976				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, rs,
   1977							   (u32)src, 2);
   1978				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
   1979				insn[i++] = BPF_EXIT_INSN();
   1980				insn[i++] = BPF_ALU64_IMM(BPF_RSH, rs, 32);
   1981				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, rs,
   1982							   src >> 32, 2);
   1983				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
   1984				insn[i++] = BPF_EXIT_INSN();
   1985			}
   1986
   1987			/* Check destination register value */
   1988			if (!(rd == R0 && op == BPF_CMPXCHG) &&
   1989			    !(rd == rs && (op & BPF_FETCH))) {
   1990				insn[i++] = BPF_JMP_REG(BPF_JEQ, rd, R10, 2);
   1991				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
   1992				insn[i++] = BPF_EXIT_INSN();
   1993			}
   1994
   1995			/* Check value in memory */
   1996			if (rs != rd) {                  /* No aliasing */
   1997				i += __bpf_ld_imm64(&insn[i], R1, res);
   1998			} else if (op == BPF_XCHG) {     /* Aliased, XCHG */
   1999				insn[i++] = BPF_MOV64_REG(R1, R10);
   2000			} else if (op == BPF_CMPXCHG) {  /* Aliased, CMPXCHG */
   2001				i += __bpf_ld_imm64(&insn[i], R1, mem);
   2002			} else {                        /* Aliased, ALU oper */
   2003				i += __bpf_ld_imm64(&insn[i], R1, mem);
   2004				insn[i++] = BPF_ALU64_REG(BPF_OP(op), R1, R10);
   2005			}
   2006
   2007			insn[i++] = BPF_LDX_MEM(width, R0, R10, -8);
   2008			if (width == BPF_DW)
   2009				insn[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
   2010			else /* width == BPF_W */
   2011				insn[i++] = BPF_JMP32_REG(BPF_JEQ, R0, R1, 2);
   2012			insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
   2013			insn[i++] = BPF_EXIT_INSN();
   2014		}
   2015	}
   2016
   2017	insn[i++] = BPF_MOV64_IMM(R0, 1);
   2018	insn[i++] = BPF_EXIT_INSN();
   2019
   2020	self->u.ptr.insns = insn;
   2021	self->u.ptr.len = i;
   2022	BUG_ON(i > len);
   2023
   2024	return 0;
   2025}
   2026
   2027/* 64-bit atomic register tests */
   2028static int bpf_fill_atomic64_add_reg_pairs(struct bpf_test *self)
   2029{
   2030	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_ADD);
   2031}
   2032
   2033static int bpf_fill_atomic64_and_reg_pairs(struct bpf_test *self)
   2034{
   2035	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_AND);
   2036}
   2037
   2038static int bpf_fill_atomic64_or_reg_pairs(struct bpf_test *self)
   2039{
   2040	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_OR);
   2041}
   2042
   2043static int bpf_fill_atomic64_xor_reg_pairs(struct bpf_test *self)
   2044{
   2045	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XOR);
   2046}
   2047
   2048static int bpf_fill_atomic64_add_fetch_reg_pairs(struct bpf_test *self)
   2049{
   2050	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_ADD | BPF_FETCH);
   2051}
   2052
   2053static int bpf_fill_atomic64_and_fetch_reg_pairs(struct bpf_test *self)
   2054{
   2055	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_AND | BPF_FETCH);
   2056}
   2057
   2058static int bpf_fill_atomic64_or_fetch_reg_pairs(struct bpf_test *self)
   2059{
   2060	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_OR | BPF_FETCH);
   2061}
   2062
   2063static int bpf_fill_atomic64_xor_fetch_reg_pairs(struct bpf_test *self)
   2064{
   2065	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XOR | BPF_FETCH);
   2066}
   2067
   2068static int bpf_fill_atomic64_xchg_reg_pairs(struct bpf_test *self)
   2069{
   2070	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XCHG);
   2071}
   2072
   2073static int bpf_fill_atomic64_cmpxchg_reg_pairs(struct bpf_test *self)
   2074{
   2075	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_CMPXCHG);
   2076}
   2077
   2078/* 32-bit atomic register tests */
   2079static int bpf_fill_atomic32_add_reg_pairs(struct bpf_test *self)
   2080{
   2081	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_ADD);
   2082}
   2083
   2084static int bpf_fill_atomic32_and_reg_pairs(struct bpf_test *self)
   2085{
   2086	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_AND);
   2087}
   2088
   2089static int bpf_fill_atomic32_or_reg_pairs(struct bpf_test *self)
   2090{
   2091	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_OR);
   2092}
   2093
   2094static int bpf_fill_atomic32_xor_reg_pairs(struct bpf_test *self)
   2095{
   2096	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XOR);
   2097}
   2098
   2099static int bpf_fill_atomic32_add_fetch_reg_pairs(struct bpf_test *self)
   2100{
   2101	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_ADD | BPF_FETCH);
   2102}
   2103
   2104static int bpf_fill_atomic32_and_fetch_reg_pairs(struct bpf_test *self)
   2105{
   2106	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_AND | BPF_FETCH);
   2107}
   2108
   2109static int bpf_fill_atomic32_or_fetch_reg_pairs(struct bpf_test *self)
   2110{
   2111	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_OR | BPF_FETCH);
   2112}
   2113
   2114static int bpf_fill_atomic32_xor_fetch_reg_pairs(struct bpf_test *self)
   2115{
   2116	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XOR | BPF_FETCH);
   2117}
   2118
   2119static int bpf_fill_atomic32_xchg_reg_pairs(struct bpf_test *self)
   2120{
   2121	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XCHG);
   2122}
   2123
   2124static int bpf_fill_atomic32_cmpxchg_reg_pairs(struct bpf_test *self)
   2125{
   2126	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_CMPXCHG);
   2127}
   2128
   2129/*
   2130 * Test the two-instruction 64-bit immediate load operation for all
   2131 * power-of-two magnitudes of the immediate operand. For each MSB, a block
   2132 * of immediate values centered around the power-of-two MSB are tested,
   2133 * both for positive and negative values. The test is designed to verify
   2134 * the operation for JITs that emit different code depending on the magnitude
   2135 * of the immediate value. This is often the case if the native instruction
   2136 * immediate field width is narrower than 32 bits.
   2137 */
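/*
 * For example, with the default block size of 64, the values tested around
 * bit 32 are sign * (0x100000000 - 32) through sign * (0x100000000 + 31).
 * Each BPF_LD_IMM64 result is checked against a reference assembled from
 * its two 32-bit halves, roughly:
 *
 *	r2 = (u32)imm            ; low word
 *	r3 = (u32)(imm >> 32)    ; high word
 *	r3 <<= 32
 *	r2 |= r3
 *	if r1 == r2 goto +1      ; r1 holds the BPF_LD_IMM64 result
 *	exit                     ; mismatch: return with R0 still 0
 */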
   2138static int bpf_fill_ld_imm64_magn(struct bpf_test *self)
   2139{
   2140	int block = 64; /* Increase for more tests per MSB position */
   2141	int len = 3 + 8 * 63 * block * 2;
   2142	struct bpf_insn *insn;
   2143	int bit, adj, sign;
   2144	int i = 0;
   2145
   2146	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
   2147	if (!insn)
   2148		return -ENOMEM;
   2149
   2150	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
   2151
   2152	for (bit = 0; bit <= 62; bit++) {
   2153		for (adj = -block / 2; adj < block / 2; adj++) {
   2154			for (sign = -1; sign <= 1; sign += 2) {
   2155				s64 imm = sign * ((1LL << bit) + adj);
   2156
   2157				/* Perform operation */
   2158				i += __bpf_ld_imm64(&insn[i], R1, imm);
   2159
   2160				/* Load reference */
   2161				insn[i++] = BPF_ALU32_IMM(BPF_MOV, R2, imm);
   2162				insn[i++] = BPF_ALU32_IMM(BPF_MOV, R3,
   2163							  (u32)(imm >> 32));
   2164				insn[i++] = BPF_ALU64_IMM(BPF_LSH, R3, 32);
   2165				insn[i++] = BPF_ALU64_REG(BPF_OR, R2, R3);
   2166
   2167				/* Check result */
   2168				insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
   2169				insn[i++] = BPF_EXIT_INSN();
   2170			}
   2171		}
   2172	}
   2173
   2174	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
   2175	insn[i++] = BPF_EXIT_INSN();
   2176
   2177	self->u.ptr.insns = insn;
   2178	self->u.ptr.len = len;
   2179	BUG_ON(i != len);
   2180
   2181	return 0;
   2182}
   2183
   2184/*
   2185 * Test the two-instruction 64-bit immediate load operation for different
   2186 * combinations of bytes. Each byte in the 64-bit word is constructed as
   2187 * (base & mask) | (rand() & ~mask), where rand() is a deterministic LCG.
    2188 * All combinations of (base1, mask1) and (base2, mask2) bytes are tested.
   2189 */
   2190static int __bpf_fill_ld_imm64_bytes(struct bpf_test *self,
   2191				     u8 base1, u8 mask1,
   2192				     u8 base2, u8 mask2)
   2193{
   2194	struct bpf_insn *insn;
   2195	int len = 3 + 8 * BIT(8);
   2196	int pattern, index;
   2197	u32 rand = 1;
   2198	int i = 0;
   2199
   2200	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
   2201	if (!insn)
   2202		return -ENOMEM;
   2203
   2204	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
   2205
   2206	for (pattern = 0; pattern < BIT(8); pattern++) {
   2207		u64 imm = 0;
   2208
   2209		for (index = 0; index < 8; index++) {
   2210			int byte;
   2211
   2212			if (pattern & BIT(index))
   2213				byte = (base1 & mask1) | (rand & ~mask1);
   2214			else
   2215				byte = (base2 & mask2) | (rand & ~mask2);
   2216			imm = (imm << 8) | byte;
   2217		}
   2218
   2219		/* Update our LCG */
   2220		rand = rand * 1664525 + 1013904223;
   2221
   2222		/* Perform operation */
   2223		i += __bpf_ld_imm64(&insn[i], R1, imm);
   2224
   2225		/* Load reference */
   2226		insn[i++] = BPF_ALU32_IMM(BPF_MOV, R2, imm);
   2227		insn[i++] = BPF_ALU32_IMM(BPF_MOV, R3, (u32)(imm >> 32));
   2228		insn[i++] = BPF_ALU64_IMM(BPF_LSH, R3, 32);
   2229		insn[i++] = BPF_ALU64_REG(BPF_OR, R2, R3);
   2230
   2231		/* Check result */
   2232		insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
   2233		insn[i++] = BPF_EXIT_INSN();
   2234	}
   2235
   2236	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
   2237	insn[i++] = BPF_EXIT_INSN();
   2238
   2239	self->u.ptr.insns = insn;
   2240	self->u.ptr.len = len;
   2241	BUG_ON(i != len);
   2242
   2243	return 0;
   2244}
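
/*
 * For illustration, the per-byte construction described above can be written
 * as the helper below (a sketch only; the fill routine applies the same
 * expression inline and this helper is not referenced by the tests). For
 * instance, bpf_fill_ld_imm64_checker() passes the patterns (0, 0xff) and
 * (0xff, 0xff), while bpf_fill_ld_imm64_pos_neg() passes (1, 0x81) and
 * (0x80, 0x80).
 */
static inline u8 __bpf_ld_imm64_pattern_byte(u8 base, u8 mask, u8 rand)
{
	/* Bits covered by the mask come from base, the rest from the LCG */
	return (base & mask) | (rand & ~mask);
}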
   2245
   2246static int bpf_fill_ld_imm64_checker(struct bpf_test *self)
   2247{
   2248	return __bpf_fill_ld_imm64_bytes(self, 0, 0xff, 0xff, 0xff);
   2249}
   2250
   2251static int bpf_fill_ld_imm64_pos_neg(struct bpf_test *self)
   2252{
   2253	return __bpf_fill_ld_imm64_bytes(self, 1, 0x81, 0x80, 0x80);
   2254}
   2255
   2256static int bpf_fill_ld_imm64_pos_zero(struct bpf_test *self)
   2257{
   2258	return __bpf_fill_ld_imm64_bytes(self, 1, 0x81, 0, 0xff);
   2259}
   2260
   2261static int bpf_fill_ld_imm64_neg_zero(struct bpf_test *self)
   2262{
   2263	return __bpf_fill_ld_imm64_bytes(self, 0x80, 0x80, 0, 0xff);
   2264}
   2265
   2266/*
   2267 * Exhaustive tests of JMP operations for all combinations of power-of-two
   2268 * magnitudes of the operands, both for positive and negative values. The
   2269 * test is designed to verify e.g. the JMP and JMP32 operations for JITs that
   2270 * emit different code depending on the magnitude of the immediate value.
   2271 */
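/*
 * As a concrete example of why the 32-bit variants need separate coverage:
 * with dst = 0x100000001 and imm = 1, a 64-bit BPF_JEQ does not take the
 * branch, while the JMP32 variant compares only the low 32 bits and does.
 * The emitters below precompute the expected outcome with
 * __bpf_match_jmp_cond() on suitably truncated operands and branch around
 * an early exit accordingly.
 */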
   2272
   2273static bool __bpf_match_jmp_cond(s64 v1, s64 v2, u8 op)
   2274{
   2275	switch (op) {
   2276	case BPF_JSET:
   2277		return !!(v1 & v2);
   2278	case BPF_JEQ:
   2279		return v1 == v2;
   2280	case BPF_JNE:
   2281		return v1 != v2;
   2282	case BPF_JGT:
   2283		return (u64)v1 > (u64)v2;
   2284	case BPF_JGE:
   2285		return (u64)v1 >= (u64)v2;
   2286	case BPF_JLT:
   2287		return (u64)v1 < (u64)v2;
   2288	case BPF_JLE:
   2289		return (u64)v1 <= (u64)v2;
   2290	case BPF_JSGT:
   2291		return v1 > v2;
   2292	case BPF_JSGE:
   2293		return v1 >= v2;
   2294	case BPF_JSLT:
   2295		return v1 < v2;
   2296	case BPF_JSLE:
   2297		return v1 <= v2;
   2298	}
   2299	return false;
   2300}
   2301
   2302static int __bpf_emit_jmp_imm(struct bpf_test *self, void *arg,
   2303			      struct bpf_insn *insns, s64 dst, s64 imm)
   2304{
   2305	int op = *(int *)arg;
   2306
   2307	if (insns) {
   2308		bool match = __bpf_match_jmp_cond(dst, (s32)imm, op);
   2309		int i = 0;
   2310
   2311		insns[i++] = BPF_ALU32_IMM(BPF_MOV, R0, match);
   2312
   2313		i += __bpf_ld_imm64(&insns[i], R1, dst);
   2314		insns[i++] = BPF_JMP_IMM(op, R1, imm, 1);
   2315		if (!match)
   2316			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
   2317		insns[i++] = BPF_EXIT_INSN();
   2318
   2319		return i;
   2320	}
   2321
    2322	return 5 + 1; /* Size estimate: MOV + LD_IMM64 (2) + JMP + JA + EXIT */
   2323}
   2324
   2325static int __bpf_emit_jmp32_imm(struct bpf_test *self, void *arg,
   2326				struct bpf_insn *insns, s64 dst, s64 imm)
   2327{
   2328	int op = *(int *)arg;
   2329
   2330	if (insns) {
   2331		bool match = __bpf_match_jmp_cond((s32)dst, (s32)imm, op);
   2332		int i = 0;
   2333
   2334		i += __bpf_ld_imm64(&insns[i], R1, dst);
   2335		insns[i++] = BPF_JMP32_IMM(op, R1, imm, 1);
   2336		if (!match)
   2337			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
   2338		insns[i++] = BPF_EXIT_INSN();
   2339
   2340		return i;
   2341	}
   2342
   2343	return 5;
   2344}
   2345
   2346static int __bpf_emit_jmp_reg(struct bpf_test *self, void *arg,
   2347			      struct bpf_insn *insns, s64 dst, s64 src)
   2348{
   2349	int op = *(int *)arg;
   2350
   2351	if (insns) {
   2352		bool match = __bpf_match_jmp_cond(dst, src, op);
   2353		int i = 0;
   2354
   2355		i += __bpf_ld_imm64(&insns[i], R1, dst);
   2356		i += __bpf_ld_imm64(&insns[i], R2, src);
   2357		insns[i++] = BPF_JMP_REG(op, R1, R2, 1);
   2358		if (!match)
   2359			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
   2360		insns[i++] = BPF_EXIT_INSN();
   2361
   2362		return i;
   2363	}
   2364
   2365	return 7;
   2366}
   2367
   2368static int __bpf_emit_jmp32_reg(struct bpf_test *self, void *arg,
   2369				struct bpf_insn *insns, s64 dst, s64 src)
   2370{
   2371	int op = *(int *)arg;
   2372
   2373	if (insns) {
   2374		bool match = __bpf_match_jmp_cond((s32)dst, (s32)src, op);
   2375		int i = 0;
   2376
   2377		i += __bpf_ld_imm64(&insns[i], R1, dst);
   2378		i += __bpf_ld_imm64(&insns[i], R2, src);
   2379		insns[i++] = BPF_JMP32_REG(op, R1, R2, 1);
   2380		if (!match)
   2381			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
   2382		insns[i++] = BPF_EXIT_INSN();
   2383
   2384		return i;
   2385	}
   2386
   2387	return 7;
   2388}
   2389
   2390static int __bpf_fill_jmp_imm(struct bpf_test *self, int op)
   2391{
   2392	return __bpf_fill_pattern(self, &op, 64, 32,
   2393				  PATTERN_BLOCK1, PATTERN_BLOCK2,
   2394				  &__bpf_emit_jmp_imm);
   2395}
   2396
   2397static int __bpf_fill_jmp32_imm(struct bpf_test *self, int op)
   2398{
   2399	return __bpf_fill_pattern(self, &op, 64, 32,
   2400				  PATTERN_BLOCK1, PATTERN_BLOCK2,
   2401				  &__bpf_emit_jmp32_imm);
   2402}
   2403
   2404static int __bpf_fill_jmp_reg(struct bpf_test *self, int op)
   2405{
   2406	return __bpf_fill_pattern(self, &op, 64, 64,
   2407				  PATTERN_BLOCK1, PATTERN_BLOCK2,
   2408				  &__bpf_emit_jmp_reg);
   2409}
   2410
   2411static int __bpf_fill_jmp32_reg(struct bpf_test *self, int op)
   2412{
   2413	return __bpf_fill_pattern(self, &op, 64, 64,
   2414				  PATTERN_BLOCK1, PATTERN_BLOCK2,
   2415				  &__bpf_emit_jmp32_reg);
   2416}
   2417
   2418/* JMP immediate tests */
   2419static int bpf_fill_jmp_jset_imm(struct bpf_test *self)
   2420{
   2421	return __bpf_fill_jmp_imm(self, BPF_JSET);
   2422}
   2423
   2424static int bpf_fill_jmp_jeq_imm(struct bpf_test *self)
   2425{
   2426	return __bpf_fill_jmp_imm(self, BPF_JEQ);
   2427}
   2428
   2429static int bpf_fill_jmp_jne_imm(struct bpf_test *self)
   2430{
   2431	return __bpf_fill_jmp_imm(self, BPF_JNE);
   2432}
   2433
   2434static int bpf_fill_jmp_jgt_imm(struct bpf_test *self)
   2435{
   2436	return __bpf_fill_jmp_imm(self, BPF_JGT);
   2437}
   2438
   2439static int bpf_fill_jmp_jge_imm(struct bpf_test *self)
   2440{
   2441	return __bpf_fill_jmp_imm(self, BPF_JGE);
   2442}
   2443
   2444static int bpf_fill_jmp_jlt_imm(struct bpf_test *self)
   2445{
   2446	return __bpf_fill_jmp_imm(self, BPF_JLT);
   2447}
   2448
   2449static int bpf_fill_jmp_jle_imm(struct bpf_test *self)
   2450{
   2451	return __bpf_fill_jmp_imm(self, BPF_JLE);
   2452}
   2453
   2454static int bpf_fill_jmp_jsgt_imm(struct bpf_test *self)
   2455{
   2456	return __bpf_fill_jmp_imm(self, BPF_JSGT);
   2457}
   2458
   2459static int bpf_fill_jmp_jsge_imm(struct bpf_test *self)
   2460{
   2461	return __bpf_fill_jmp_imm(self, BPF_JSGE);
   2462}
   2463
   2464static int bpf_fill_jmp_jslt_imm(struct bpf_test *self)
   2465{
   2466	return __bpf_fill_jmp_imm(self, BPF_JSLT);
   2467}
   2468
   2469static int bpf_fill_jmp_jsle_imm(struct bpf_test *self)
   2470{
   2471	return __bpf_fill_jmp_imm(self, BPF_JSLE);
   2472}
   2473
   2474/* JMP32 immediate tests */
   2475static int bpf_fill_jmp32_jset_imm(struct bpf_test *self)
   2476{
   2477	return __bpf_fill_jmp32_imm(self, BPF_JSET);
   2478}
   2479
   2480static int bpf_fill_jmp32_jeq_imm(struct bpf_test *self)
   2481{
   2482	return __bpf_fill_jmp32_imm(self, BPF_JEQ);
   2483}
   2484
   2485static int bpf_fill_jmp32_jne_imm(struct bpf_test *self)
   2486{
   2487	return __bpf_fill_jmp32_imm(self, BPF_JNE);
   2488}
   2489
   2490static int bpf_fill_jmp32_jgt_imm(struct bpf_test *self)
   2491{
   2492	return __bpf_fill_jmp32_imm(self, BPF_JGT);
   2493}
   2494
   2495static int bpf_fill_jmp32_jge_imm(struct bpf_test *self)
   2496{
   2497	return __bpf_fill_jmp32_imm(self, BPF_JGE);
   2498}
   2499
   2500static int bpf_fill_jmp32_jlt_imm(struct bpf_test *self)
   2501{
   2502	return __bpf_fill_jmp32_imm(self, BPF_JLT);
   2503}
   2504
   2505static int bpf_fill_jmp32_jle_imm(struct bpf_test *self)
   2506{
   2507	return __bpf_fill_jmp32_imm(self, BPF_JLE);
   2508}
   2509
   2510static int bpf_fill_jmp32_jsgt_imm(struct bpf_test *self)
   2511{
   2512	return __bpf_fill_jmp32_imm(self, BPF_JSGT);
   2513}
   2514
   2515static int bpf_fill_jmp32_jsge_imm(struct bpf_test *self)
   2516{
   2517	return __bpf_fill_jmp32_imm(self, BPF_JSGE);
   2518}
   2519
   2520static int bpf_fill_jmp32_jslt_imm(struct bpf_test *self)
   2521{
   2522	return __bpf_fill_jmp32_imm(self, BPF_JSLT);
   2523}
   2524
   2525static int bpf_fill_jmp32_jsle_imm(struct bpf_test *self)
   2526{
   2527	return __bpf_fill_jmp32_imm(self, BPF_JSLE);
   2528}
   2529
   2530/* JMP register tests */
   2531static int bpf_fill_jmp_jset_reg(struct bpf_test *self)
   2532{
   2533	return __bpf_fill_jmp_reg(self, BPF_JSET);
   2534}
   2535
   2536static int bpf_fill_jmp_jeq_reg(struct bpf_test *self)
   2537{
   2538	return __bpf_fill_jmp_reg(self, BPF_JEQ);
   2539}
   2540
   2541static int bpf_fill_jmp_jne_reg(struct bpf_test *self)
   2542{
   2543	return __bpf_fill_jmp_reg(self, BPF_JNE);
   2544}
   2545
   2546static int bpf_fill_jmp_jgt_reg(struct bpf_test *self)
   2547{
   2548	return __bpf_fill_jmp_reg(self, BPF_JGT);
   2549}
   2550
   2551static int bpf_fill_jmp_jge_reg(struct bpf_test *self)
   2552{
   2553	return __bpf_fill_jmp_reg(self, BPF_JGE);
   2554}
   2555
   2556static int bpf_fill_jmp_jlt_reg(struct bpf_test *self)
   2557{
   2558	return __bpf_fill_jmp_reg(self, BPF_JLT);
   2559}
   2560
   2561static int bpf_fill_jmp_jle_reg(struct bpf_test *self)
   2562{
   2563	return __bpf_fill_jmp_reg(self, BPF_JLE);
   2564}
   2565
   2566static int bpf_fill_jmp_jsgt_reg(struct bpf_test *self)
   2567{
   2568	return __bpf_fill_jmp_reg(self, BPF_JSGT);
   2569}
   2570
   2571static int bpf_fill_jmp_jsge_reg(struct bpf_test *self)
   2572{
   2573	return __bpf_fill_jmp_reg(self, BPF_JSGE);
   2574}
   2575
   2576static int bpf_fill_jmp_jslt_reg(struct bpf_test *self)
   2577{
   2578	return __bpf_fill_jmp_reg(self, BPF_JSLT);
   2579}
   2580
   2581static int bpf_fill_jmp_jsle_reg(struct bpf_test *self)
   2582{
   2583	return __bpf_fill_jmp_reg(self, BPF_JSLE);
   2584}
   2585
   2586/* JMP32 register tests */
   2587static int bpf_fill_jmp32_jset_reg(struct bpf_test *self)
   2588{
   2589	return __bpf_fill_jmp32_reg(self, BPF_JSET);
   2590}
   2591
   2592static int bpf_fill_jmp32_jeq_reg(struct bpf_test *self)
   2593{
   2594	return __bpf_fill_jmp32_reg(self, BPF_JEQ);
   2595}
   2596
   2597static int bpf_fill_jmp32_jne_reg(struct bpf_test *self)
   2598{
   2599	return __bpf_fill_jmp32_reg(self, BPF_JNE);
   2600}
   2601
   2602static int bpf_fill_jmp32_jgt_reg(struct bpf_test *self)
   2603{
   2604	return __bpf_fill_jmp32_reg(self, BPF_JGT);
   2605}
   2606
   2607static int bpf_fill_jmp32_jge_reg(struct bpf_test *self)
   2608{
   2609	return __bpf_fill_jmp32_reg(self, BPF_JGE);
   2610}
   2611
   2612static int bpf_fill_jmp32_jlt_reg(struct bpf_test *self)
   2613{
   2614	return __bpf_fill_jmp32_reg(self, BPF_JLT);
   2615}
   2616
   2617static int bpf_fill_jmp32_jle_reg(struct bpf_test *self)
   2618{
   2619	return __bpf_fill_jmp32_reg(self, BPF_JLE);
   2620}
   2621
   2622static int bpf_fill_jmp32_jsgt_reg(struct bpf_test *self)
   2623{
   2624	return __bpf_fill_jmp32_reg(self, BPF_JSGT);
   2625}
   2626
   2627static int bpf_fill_jmp32_jsge_reg(struct bpf_test *self)
   2628{
   2629	return __bpf_fill_jmp32_reg(self, BPF_JSGE);
   2630}
   2631
   2632static int bpf_fill_jmp32_jslt_reg(struct bpf_test *self)
   2633{
   2634	return __bpf_fill_jmp32_reg(self, BPF_JSLT);
   2635}
   2636
   2637static int bpf_fill_jmp32_jsle_reg(struct bpf_test *self)
   2638{
   2639	return __bpf_fill_jmp32_reg(self, BPF_JSLE);
   2640}
   2641
   2642/*
   2643 * Set up a sequence of staggered jumps, forwards and backwards with
   2644 * increasing offset. This tests the conversion of relative jumps to
   2645 * JITed native jumps. On some architectures, for example MIPS, a large
   2646 * PC-relative jump offset may overflow the immediate field of the native
   2647 * conditional branch instruction, triggering a conversion to use an
   2648 * absolute jump instead. Since this changes the jump offsets, another
   2649 * offset computation pass is necessary, and that may in turn trigger
   2650 * another branch conversion. This jump sequence is particularly nasty
   2651 * in that regard.
   2652 *
   2653 * The sequence generation is parameterized by size and jump type.
   2654 * The size must be even, and the expected result is always size + 1.
   2655 * Below is an example with size=8 and result=9.
   2656 *
   2657 *                     ________________________Start
   2658 *                     R0 = 0
   2659 *                     R1 = r1
   2660 *                     R2 = r2
   2661 *            ,------- JMP +4 * 3______________Preamble: 4 insns
   2662 * ,----------|-ind 0- if R0 != 7 JMP 8 * 3 + 1 <--------------------.
   2663 * |          |        R0 = 8                                        |
   2664 * |          |        JMP +7 * 3               ------------------------.
   2665 * | ,--------|-----1- if R0 != 5 JMP 7 * 3 + 1 <--------------.     |  |
   2666 * | |        |        R0 = 6                                  |     |  |
   2667 * | |        |        JMP +5 * 3               ------------------.  |  |
   2668 * | | ,------|-----2- if R0 != 3 JMP 6 * 3 + 1 <--------.     |  |  |  |
   2669 * | | |      |        R0 = 4                            |     |  |  |  |
   2670 * | | |      |        JMP +3 * 3               ------------.  |  |  |  |
   2671 * | | | ,----|-----3- if R0 != 1 JMP 5 * 3 + 1 <--.     |  |  |  |  |  |
   2672 * | | | |    |        R0 = 2                      |     |  |  |  |  |  |
   2673 * | | | |    |        JMP +1 * 3               ------.  |  |  |  |  |  |
   2674 * | | | | ,--t=====4> if R0 != 0 JMP 4 * 3 + 1    1  2  3  4  5  6  7  8 loc
   2675 * | | | | |           R0 = 1                     -1 +2 -3 +4 -5 +6 -7 +8 off
   2676 * | | | | |           JMP -2 * 3               ---'  |  |  |  |  |  |  |
   2677 * | | | | | ,------5- if R0 != 2 JMP 3 * 3 + 1 <-----'  |  |  |  |  |  |
   2678 * | | | | | |         R0 = 3                            |  |  |  |  |  |
   2679 * | | | | | |         JMP -4 * 3               ---------'  |  |  |  |  |
   2680 * | | | | | | ,----6- if R0 != 4 JMP 2 * 3 + 1 <-----------'  |  |  |  |
   2681 * | | | | | | |       R0 = 5                                  |  |  |  |
   2682 * | | | | | | |       JMP -6 * 3               ---------------'  |  |  |
   2683 * | | | | | | | ,--7- if R0 != 6 JMP 1 * 3 + 1 <-----------------'  |  |
   2684 * | | | | | | | |     R0 = 7                                        |  |
   2685 * | | Error | | |     JMP -8 * 3               ---------------------'  |
   2686 * | | paths | | | ,8- if R0 != 8 JMP 0 * 3 + 1 <-----------------------'
   2687 * | | | | | | | | |   R0 = 9__________________Sequence: 3 * size - 1 insns
   2688 * `-+-+-+-+-+-+-+-+-> EXIT____________________Return: 1 insn
   2689 *
   2690 */
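/*
 * In the generator below, each of the size + 1 locations expands to three
 * instructions (an illustrative sketch matching the diagram above):
 *
 *	if R0 != loc - 1 goto exit	; error path, wrong arrival order
 *	R0 = loc
 *	<jmp>, off = 3 * (off - 1)	; staggered jump to the next location
 *
 * e.g. the location at off = +4 gets a jump offset of 3 * (4 - 1) = 9 and
 * lands on the location that checks R0 != 4, exactly as drawn above.
 */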
   2691
    2692/* Maximum size: jump offsets are s16, each location is 3 insns, size must be even */
   2693#define MAX_STAGGERED_JMP_SIZE ((0x7fff / 3) & ~1)
   2694
   2695/* We use a reduced number of iterations to get a reasonable execution time */
   2696#define NR_STAGGERED_JMP_RUNS 10
   2697
   2698static int __bpf_fill_staggered_jumps(struct bpf_test *self,
   2699				      const struct bpf_insn *jmp,
   2700				      u64 r1, u64 r2)
   2701{
   2702	int size = self->test[0].result - 1;
   2703	int len = 4 + 3 * (size + 1);
   2704	struct bpf_insn *insns;
   2705	int off, ind;
   2706
   2707	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
   2708	if (!insns)
   2709		return -ENOMEM;
   2710
   2711	/* Preamble */
   2712	insns[0] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
   2713	insns[1] = BPF_ALU64_IMM(BPF_MOV, R1, r1);
   2714	insns[2] = BPF_ALU64_IMM(BPF_MOV, R2, r2);
   2715	insns[3] = BPF_JMP_IMM(BPF_JA, 0, 0, 3 * size / 2);
   2716
   2717	/* Sequence */
   2718	for (ind = 0, off = size; ind <= size; ind++, off -= 2) {
   2719		struct bpf_insn *ins = &insns[4 + 3 * ind];
   2720		int loc;
   2721
   2722		if (off == 0)
   2723			off--;
   2724
   2725		loc = abs(off);
   2726		ins[0] = BPF_JMP_IMM(BPF_JNE, R0, loc - 1,
   2727				     3 * (size - ind) + 1);
   2728		ins[1] = BPF_ALU64_IMM(BPF_MOV, R0, loc);
   2729		ins[2] = *jmp;
   2730		ins[2].off = 3 * (off - 1);
   2731	}
   2732
   2733	/* Return */
   2734	insns[len - 1] = BPF_EXIT_INSN();
   2735
   2736	self->u.ptr.insns = insns;
   2737	self->u.ptr.len = len;
   2738
   2739	return 0;
   2740}
   2741
   2742/* 64-bit unconditional jump */
   2743static int bpf_fill_staggered_ja(struct bpf_test *self)
   2744{
   2745	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JA, 0, 0, 0);
   2746
   2747	return __bpf_fill_staggered_jumps(self, &jmp, 0, 0);
   2748}
   2749
   2750/* 64-bit immediate jumps */
   2751static int bpf_fill_staggered_jeq_imm(struct bpf_test *self)
   2752{
   2753	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JEQ, R1, 1234, 0);
   2754
   2755	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
   2756}
   2757
   2758static int bpf_fill_staggered_jne_imm(struct bpf_test *self)
   2759{
   2760	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JNE, R1, 1234, 0);
   2761
   2762	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
   2763}
   2764
   2765static int bpf_fill_staggered_jset_imm(struct bpf_test *self)
   2766{
   2767	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSET, R1, 0x82, 0);
   2768
   2769	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
   2770}
   2771
   2772static int bpf_fill_staggered_jgt_imm(struct bpf_test *self)
   2773{
   2774	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGT, R1, 1234, 0);
   2775
   2776	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
   2777}
   2778
   2779static int bpf_fill_staggered_jge_imm(struct bpf_test *self)
   2780{
   2781	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGE, R1, 1234, 0);
   2782
   2783	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
   2784}
   2785
   2786static int bpf_fill_staggered_jlt_imm(struct bpf_test *self)
   2787{
   2788	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLT, R1, 0x80000000, 0);
   2789
   2790	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
   2791}
   2792
   2793static int bpf_fill_staggered_jle_imm(struct bpf_test *self)
   2794{
   2795	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLE, R1, 1234, 0);
   2796
   2797	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
   2798}
   2799
   2800static int bpf_fill_staggered_jsgt_imm(struct bpf_test *self)
   2801{
   2802	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGT, R1, -2, 0);
   2803
   2804	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
   2805}
   2806
   2807static int bpf_fill_staggered_jsge_imm(struct bpf_test *self)
   2808{
   2809	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGE, R1, -2, 0);
   2810
   2811	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
   2812}
   2813
   2814static int bpf_fill_staggered_jslt_imm(struct bpf_test *self)
   2815{
   2816	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLT, R1, -1, 0);
   2817
   2818	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
   2819}
   2820
   2821static int bpf_fill_staggered_jsle_imm(struct bpf_test *self)
   2822{
   2823	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLE, R1, -1, 0);
   2824
   2825	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
   2826}
   2827
   2828/* 64-bit register jumps */
   2829static int bpf_fill_staggered_jeq_reg(struct bpf_test *self)
   2830{
   2831	struct bpf_insn jmp = BPF_JMP_REG(BPF_JEQ, R1, R2, 0);
   2832
   2833	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
   2834}
   2835
   2836static int bpf_fill_staggered_jne_reg(struct bpf_test *self)
   2837{
   2838	struct bpf_insn jmp = BPF_JMP_REG(BPF_JNE, R1, R2, 0);
   2839
   2840	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
   2841}
   2842
   2843static int bpf_fill_staggered_jset_reg(struct bpf_test *self)
   2844{
   2845	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSET, R1, R2, 0);
   2846
   2847	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
   2848}
   2849
   2850static int bpf_fill_staggered_jgt_reg(struct bpf_test *self)
   2851{
   2852	struct bpf_insn jmp = BPF_JMP_REG(BPF_JGT, R1, R2, 0);
   2853
   2854	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
   2855}
   2856
   2857static int bpf_fill_staggered_jge_reg(struct bpf_test *self)
   2858{
   2859	struct bpf_insn jmp = BPF_JMP_REG(BPF_JGE, R1, R2, 0);
   2860
   2861	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
   2862}
   2863
   2864static int bpf_fill_staggered_jlt_reg(struct bpf_test *self)
   2865{
   2866	struct bpf_insn jmp = BPF_JMP_REG(BPF_JLT, R1, R2, 0);
   2867
   2868	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
   2869}
   2870
   2871static int bpf_fill_staggered_jle_reg(struct bpf_test *self)
   2872{
   2873	struct bpf_insn jmp = BPF_JMP_REG(BPF_JLE, R1, R2, 0);
   2874
   2875	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
   2876}
   2877
   2878static int bpf_fill_staggered_jsgt_reg(struct bpf_test *self)
   2879{
   2880	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGT, R1, R2, 0);
   2881
   2882	return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
   2883}
   2884
   2885static int bpf_fill_staggered_jsge_reg(struct bpf_test *self)
   2886{
   2887	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGE, R1, R2, 0);
   2888
   2889	return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
   2890}
   2891
   2892static int bpf_fill_staggered_jslt_reg(struct bpf_test *self)
   2893{
   2894	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLT, R1, R2, 0);
   2895
   2896	return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
   2897}
   2898
   2899static int bpf_fill_staggered_jsle_reg(struct bpf_test *self)
   2900{
   2901	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLE, R1, R2, 0);
   2902
   2903	return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
   2904}
   2905
   2906/* 32-bit immediate jumps */
   2907static int bpf_fill_staggered_jeq32_imm(struct bpf_test *self)
   2908{
   2909	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JEQ, R1, 1234, 0);
   2910
   2911	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
   2912}
   2913
   2914static int bpf_fill_staggered_jne32_imm(struct bpf_test *self)
   2915{
   2916	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JNE, R1, 1234, 0);
   2917
   2918	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
   2919}
   2920
   2921static int bpf_fill_staggered_jset32_imm(struct bpf_test *self)
   2922{
   2923	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSET, R1, 0x82, 0);
   2924
   2925	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
   2926}
   2927
   2928static int bpf_fill_staggered_jgt32_imm(struct bpf_test *self)
   2929{
   2930	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGT, R1, 1234, 0);
   2931
   2932	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
   2933}
   2934
   2935static int bpf_fill_staggered_jge32_imm(struct bpf_test *self)
   2936{
   2937	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGE, R1, 1234, 0);
   2938
   2939	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
   2940}
   2941
   2942static int bpf_fill_staggered_jlt32_imm(struct bpf_test *self)
   2943{
   2944	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLT, R1, 0x80000000, 0);
   2945
   2946	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
   2947}
   2948
   2949static int bpf_fill_staggered_jle32_imm(struct bpf_test *self)
   2950{
   2951	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLE, R1, 1234, 0);
   2952
   2953	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
   2954}
   2955
   2956static int bpf_fill_staggered_jsgt32_imm(struct bpf_test *self)
   2957{
   2958	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGT, R1, -2, 0);
   2959
   2960	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
   2961}
   2962
   2963static int bpf_fill_staggered_jsge32_imm(struct bpf_test *self)
   2964{
   2965	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGE, R1, -2, 0);
   2966
   2967	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
   2968}
   2969
   2970static int bpf_fill_staggered_jslt32_imm(struct bpf_test *self)
   2971{
   2972	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLT, R1, -1, 0);
   2973
   2974	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
   2975}
   2976
   2977static int bpf_fill_staggered_jsle32_imm(struct bpf_test *self)
   2978{
   2979	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLE, R1, -1, 0);
   2980
   2981	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
   2982}
   2983
   2984/* 32-bit register jumps */
   2985static int bpf_fill_staggered_jeq32_reg(struct bpf_test *self)
   2986{
   2987	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JEQ, R1, R2, 0);
   2988
   2989	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
   2990}
   2991
   2992static int bpf_fill_staggered_jne32_reg(struct bpf_test *self)
   2993{
   2994	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JNE, R1, R2, 0);
   2995
   2996	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
   2997}
   2998
   2999static int bpf_fill_staggered_jset32_reg(struct bpf_test *self)
   3000{
   3001	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSET, R1, R2, 0);
   3002
   3003	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
   3004}
   3005
   3006static int bpf_fill_staggered_jgt32_reg(struct bpf_test *self)
   3007{
   3008	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGT, R1, R2, 0);
   3009
   3010	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
   3011}
   3012
   3013static int bpf_fill_staggered_jge32_reg(struct bpf_test *self)
   3014{
   3015	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGE, R1, R2, 0);
   3016
   3017	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
   3018}
   3019
   3020static int bpf_fill_staggered_jlt32_reg(struct bpf_test *self)
   3021{
   3022	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLT, R1, R2, 0);
   3023
   3024	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
   3025}
   3026
   3027static int bpf_fill_staggered_jle32_reg(struct bpf_test *self)
   3028{
   3029	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLE, R1, R2, 0);
   3030
   3031	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
   3032}
   3033
   3034static int bpf_fill_staggered_jsgt32_reg(struct bpf_test *self)
   3035{
   3036	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGT, R1, R2, 0);
   3037
   3038	return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
   3039}
   3040
   3041static int bpf_fill_staggered_jsge32_reg(struct bpf_test *self)
   3042{
   3043	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGE, R1, R2, 0);
   3044
   3045	return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
   3046}
   3047
   3048static int bpf_fill_staggered_jslt32_reg(struct bpf_test *self)
   3049{
   3050	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLT, R1, R2, 0);
   3051
   3052	return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
   3053}
   3054
   3055static int bpf_fill_staggered_jsle32_reg(struct bpf_test *self)
   3056{
   3057	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLE, R1, R2, 0);
   3058
   3059	return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
   3060}
   3061
   3062
   3063static struct bpf_test tests[] = {
   3064	{
   3065		"TAX",
   3066		.u.insns = {
   3067			BPF_STMT(BPF_LD | BPF_IMM, 1),
   3068			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   3069			BPF_STMT(BPF_LD | BPF_IMM, 2),
   3070			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   3071			BPF_STMT(BPF_ALU | BPF_NEG, 0), /* A == -3 */
   3072			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   3073			BPF_STMT(BPF_LD | BPF_LEN, 0),
   3074			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   3075			BPF_STMT(BPF_MISC | BPF_TAX, 0), /* X == len - 3 */
   3076			BPF_STMT(BPF_LD | BPF_B | BPF_IND, 1),
   3077			BPF_STMT(BPF_RET | BPF_A, 0)
   3078		},
   3079		CLASSIC,
   3080		{ 10, 20, 30, 40, 50 },
   3081		{ { 2, 10 }, { 3, 20 }, { 4, 30 } },
   3082	},
   3083	{
   3084		"TXA",
   3085		.u.insns = {
   3086			BPF_STMT(BPF_LDX | BPF_LEN, 0),
   3087			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   3088			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   3089			BPF_STMT(BPF_RET | BPF_A, 0) /* A == len * 2 */
   3090		},
   3091		CLASSIC,
   3092		{ 10, 20, 30, 40, 50 },
   3093		{ { 1, 2 }, { 3, 6 }, { 4, 8 } },
   3094	},
   3095	{
   3096		"ADD_SUB_MUL_K",
   3097		.u.insns = {
   3098			BPF_STMT(BPF_LD | BPF_IMM, 1),
   3099			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 2),
   3100			BPF_STMT(BPF_LDX | BPF_IMM, 3),
   3101			BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
   3102			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0xffffffff),
   3103			BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 3),
   3104			BPF_STMT(BPF_RET | BPF_A, 0)
   3105		},
   3106		CLASSIC | FLAG_NO_DATA,
   3107		{ },
   3108		{ { 0, 0xfffffffd } }
   3109	},
   3110	{
   3111		"DIV_MOD_KX",
   3112		.u.insns = {
   3113			BPF_STMT(BPF_LD | BPF_IMM, 8),
   3114			BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 2),
   3115			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   3116			BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
   3117			BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
   3118			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   3119			BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
   3120			BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x70000000),
   3121			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   3122			BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
   3123			BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
   3124			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   3125			BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
   3126			BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x70000000),
   3127			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   3128			BPF_STMT(BPF_RET | BPF_A, 0)
   3129		},
   3130		CLASSIC | FLAG_NO_DATA,
   3131		{ },
   3132		{ { 0, 0x20000000 } }
   3133	},
   3134	{
   3135		"AND_OR_LSH_K",
   3136		.u.insns = {
   3137			BPF_STMT(BPF_LD | BPF_IMM, 0xff),
   3138			BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
   3139			BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 27),
   3140			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   3141			BPF_STMT(BPF_LD | BPF_IMM, 0xf),
   3142			BPF_STMT(BPF_ALU | BPF_OR | BPF_K, 0xf0),
   3143			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   3144			BPF_STMT(BPF_RET | BPF_A, 0)
   3145		},
   3146		CLASSIC | FLAG_NO_DATA,
   3147		{ },
   3148		{ { 0, 0x800000ff }, { 1, 0x800000ff } },
   3149	},
   3150	{
   3151		"LD_IMM_0",
   3152		.u.insns = {
   3153			BPF_STMT(BPF_LD | BPF_IMM, 0), /* ld #0 */
   3154			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0, 1, 0),
   3155			BPF_STMT(BPF_RET | BPF_K, 0),
   3156			BPF_STMT(BPF_RET | BPF_K, 1),
   3157		},
   3158		CLASSIC,
   3159		{ },
   3160		{ { 1, 1 } },
   3161	},
   3162	{
   3163		"LD_IND",
   3164		.u.insns = {
   3165			BPF_STMT(BPF_LDX | BPF_LEN, 0),
   3166			BPF_STMT(BPF_LD | BPF_H | BPF_IND, MAX_K),
   3167			BPF_STMT(BPF_RET | BPF_K, 1)
   3168		},
   3169		CLASSIC,
   3170		{ },
   3171		{ { 1, 0 }, { 10, 0 }, { 60, 0 } },
   3172	},
   3173	{
   3174		"LD_ABS",
   3175		.u.insns = {
   3176			BPF_STMT(BPF_LD | BPF_W | BPF_ABS, 1000),
   3177			BPF_STMT(BPF_RET | BPF_K, 1)
   3178		},
   3179		CLASSIC,
   3180		{ },
   3181		{ { 1, 0 }, { 10, 0 }, { 60, 0 } },
   3182	},
   3183	{
   3184		"LD_ABS_LL",
   3185		.u.insns = {
   3186			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF),
   3187			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   3188			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF + 1),
   3189			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   3190			BPF_STMT(BPF_RET | BPF_A, 0)
   3191		},
   3192		CLASSIC,
   3193		{ 1, 2, 3 },
   3194		{ { 1, 0 }, { 2, 3 } },
   3195	},
   3196	{
   3197		"LD_IND_LL",
   3198		.u.insns = {
   3199			BPF_STMT(BPF_LD | BPF_IMM, SKF_LL_OFF - 1),
   3200			BPF_STMT(BPF_LDX | BPF_LEN, 0),
   3201			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   3202			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   3203			BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
   3204			BPF_STMT(BPF_RET | BPF_A, 0)
   3205		},
   3206		CLASSIC,
   3207		{ 1, 2, 3, 0xff },
   3208		{ { 1, 1 }, { 3, 3 }, { 4, 0xff } },
   3209	},
   3210	{
   3211		"LD_ABS_NET",
   3212		.u.insns = {
   3213			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF),
   3214			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   3215			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF + 1),
   3216			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   3217			BPF_STMT(BPF_RET | BPF_A, 0)
   3218		},
   3219		CLASSIC,
   3220		{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
   3221		{ { 15, 0 }, { 16, 3 } },
   3222	},
   3223	{
   3224		"LD_IND_NET",
   3225		.u.insns = {
   3226			BPF_STMT(BPF_LD | BPF_IMM, SKF_NET_OFF - 15),
   3227			BPF_STMT(BPF_LDX | BPF_LEN, 0),
   3228			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   3229			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   3230			BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
   3231			BPF_STMT(BPF_RET | BPF_A, 0)
   3232		},
   3233		CLASSIC,
   3234		{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
   3235		{ { 14, 0 }, { 15, 1 }, { 17, 3 } },
   3236	},
   3237	{
   3238		"LD_PKTTYPE",
   3239		.u.insns = {
   3240			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3241				 SKF_AD_OFF + SKF_AD_PKTTYPE),
   3242			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
   3243			BPF_STMT(BPF_RET | BPF_K, 1),
   3244			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3245				 SKF_AD_OFF + SKF_AD_PKTTYPE),
   3246			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
   3247			BPF_STMT(BPF_RET | BPF_K, 1),
   3248			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3249				 SKF_AD_OFF + SKF_AD_PKTTYPE),
   3250			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
   3251			BPF_STMT(BPF_RET | BPF_K, 1),
   3252			BPF_STMT(BPF_RET | BPF_A, 0)
   3253		},
   3254		CLASSIC,
   3255		{ },
   3256		{ { 1, 3 }, { 10, 3 } },
   3257	},
   3258	{
   3259		"LD_MARK",
   3260		.u.insns = {
   3261			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3262				 SKF_AD_OFF + SKF_AD_MARK),
   3263			BPF_STMT(BPF_RET | BPF_A, 0)
   3264		},
   3265		CLASSIC,
   3266		{ },
   3267		{ { 1, SKB_MARK}, { 10, SKB_MARK} },
   3268	},
   3269	{
   3270		"LD_RXHASH",
   3271		.u.insns = {
   3272			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3273				 SKF_AD_OFF + SKF_AD_RXHASH),
   3274			BPF_STMT(BPF_RET | BPF_A, 0)
   3275		},
   3276		CLASSIC,
   3277		{ },
   3278		{ { 1, SKB_HASH}, { 10, SKB_HASH} },
   3279	},
   3280	{
   3281		"LD_QUEUE",
   3282		.u.insns = {
   3283			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3284				 SKF_AD_OFF + SKF_AD_QUEUE),
   3285			BPF_STMT(BPF_RET | BPF_A, 0)
   3286		},
   3287		CLASSIC,
   3288		{ },
   3289		{ { 1, SKB_QUEUE_MAP }, { 10, SKB_QUEUE_MAP } },
   3290	},
   3291	{
   3292		"LD_PROTOCOL",
   3293		.u.insns = {
   3294			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 1),
   3295			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 20, 1, 0),
   3296			BPF_STMT(BPF_RET | BPF_K, 0),
   3297			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3298				 SKF_AD_OFF + SKF_AD_PROTOCOL),
   3299			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   3300			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
   3301			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 30, 1, 0),
   3302			BPF_STMT(BPF_RET | BPF_K, 0),
   3303			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   3304			BPF_STMT(BPF_RET | BPF_A, 0)
   3305		},
   3306		CLASSIC,
   3307		{ 10, 20, 30 },
   3308		{ { 10, ETH_P_IP }, { 100, ETH_P_IP } },
   3309	},
   3310	{
   3311		"LD_VLAN_TAG",
   3312		.u.insns = {
   3313			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3314				 SKF_AD_OFF + SKF_AD_VLAN_TAG),
   3315			BPF_STMT(BPF_RET | BPF_A, 0)
   3316		},
   3317		CLASSIC,
   3318		{ },
   3319		{
   3320			{ 1, SKB_VLAN_TCI },
   3321			{ 10, SKB_VLAN_TCI }
   3322		},
   3323	},
   3324	{
   3325		"LD_VLAN_TAG_PRESENT",
   3326		.u.insns = {
   3327			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3328				 SKF_AD_OFF + SKF_AD_VLAN_TAG_PRESENT),
   3329			BPF_STMT(BPF_RET | BPF_A, 0)
   3330		},
   3331		CLASSIC,
   3332		{ },
   3333		{
   3334			{ 1, SKB_VLAN_PRESENT },
   3335			{ 10, SKB_VLAN_PRESENT }
   3336		},
   3337	},
   3338	{
   3339		"LD_IFINDEX",
   3340		.u.insns = {
   3341			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3342				 SKF_AD_OFF + SKF_AD_IFINDEX),
   3343			BPF_STMT(BPF_RET | BPF_A, 0)
   3344		},
   3345		CLASSIC,
   3346		{ },
   3347		{ { 1, SKB_DEV_IFINDEX }, { 10, SKB_DEV_IFINDEX } },
   3348	},
   3349	{
   3350		"LD_HATYPE",
   3351		.u.insns = {
   3352			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3353				 SKF_AD_OFF + SKF_AD_HATYPE),
   3354			BPF_STMT(BPF_RET | BPF_A, 0)
   3355		},
   3356		CLASSIC,
   3357		{ },
   3358		{ { 1, SKB_DEV_TYPE }, { 10, SKB_DEV_TYPE } },
   3359	},
   3360	{
   3361		"LD_CPU",
   3362		.u.insns = {
   3363			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3364				 SKF_AD_OFF + SKF_AD_CPU),
   3365			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   3366			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3367				 SKF_AD_OFF + SKF_AD_CPU),
   3368			BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
   3369			BPF_STMT(BPF_RET | BPF_A, 0)
   3370		},
   3371		CLASSIC,
   3372		{ },
   3373		{ { 1, 0 }, { 10, 0 } },
   3374	},
   3375	{
   3376		"LD_NLATTR",
   3377		.u.insns = {
   3378			BPF_STMT(BPF_LDX | BPF_IMM, 2),
   3379			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   3380			BPF_STMT(BPF_LDX | BPF_IMM, 3),
   3381			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3382				 SKF_AD_OFF + SKF_AD_NLATTR),
   3383			BPF_STMT(BPF_RET | BPF_A, 0)
   3384		},
   3385		CLASSIC,
   3386#ifdef __BIG_ENDIAN
   3387		{ 0xff, 0xff, 0, 4, 0, 2, 0, 4, 0, 3 },
   3388#else
   3389		{ 0xff, 0xff, 4, 0, 2, 0, 4, 0, 3, 0 },
   3390#endif
   3391		{ { 4, 0 }, { 20, 6 } },
   3392	},
   3393	{
   3394		"LD_NLATTR_NEST",
   3395		.u.insns = {
   3396			BPF_STMT(BPF_LD | BPF_IMM, 2),
   3397			BPF_STMT(BPF_LDX | BPF_IMM, 3),
   3398			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3399				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
   3400			BPF_STMT(BPF_LD | BPF_IMM, 2),
   3401			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3402				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
   3403			BPF_STMT(BPF_LD | BPF_IMM, 2),
   3404			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3405				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
   3406			BPF_STMT(BPF_LD | BPF_IMM, 2),
   3407			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3408				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
   3409			BPF_STMT(BPF_LD | BPF_IMM, 2),
   3410			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3411				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
   3412			BPF_STMT(BPF_LD | BPF_IMM, 2),
   3413			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3414				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
   3415			BPF_STMT(BPF_LD | BPF_IMM, 2),
   3416			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3417				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
   3418			BPF_STMT(BPF_LD | BPF_IMM, 2),
   3419			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3420				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
   3421			BPF_STMT(BPF_RET | BPF_A, 0)
   3422		},
   3423		CLASSIC,
   3424#ifdef __BIG_ENDIAN
   3425		{ 0xff, 0xff, 0, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3 },
   3426#else
   3427		{ 0xff, 0xff, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3, 0 },
   3428#endif
   3429		{ { 4, 0 }, { 20, 10 } },
   3430	},
   3431	{
   3432		"LD_PAYLOAD_OFF",
   3433		.u.insns = {
   3434			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3435				 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
   3436			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3437				 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
   3438			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3439				 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
   3440			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3441				 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
   3442			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3443				 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
   3444			BPF_STMT(BPF_RET | BPF_A, 0)
   3445		},
   3446		CLASSIC,
   3447		/* 00:00:00:00:00:00 > 00:00:00:00:00:00, ethtype IPv4 (0x0800),
   3448		 * length 98: 127.0.0.1 > 127.0.0.1: ICMP echo request,
   3449		 * id 9737, seq 1, length 64
   3450		 */
   3451		{ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
   3452		  0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
   3453		  0x08, 0x00,
   3454		  0x45, 0x00, 0x00, 0x54, 0xac, 0x8b, 0x40, 0x00, 0x40,
   3455		  0x01, 0x90, 0x1b, 0x7f, 0x00, 0x00, 0x01 },
   3456		{ { 30, 0 }, { 100, 42 } },
   3457	},
   3458	{
   3459		"LD_ANC_XOR",
   3460		.u.insns = {
   3461			BPF_STMT(BPF_LD | BPF_IMM, 10),
   3462			BPF_STMT(BPF_LDX | BPF_IMM, 300),
   3463			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   3464				 SKF_AD_OFF + SKF_AD_ALU_XOR_X),
   3465			BPF_STMT(BPF_RET | BPF_A, 0)
   3466		},
   3467		CLASSIC,
   3468		{ },
   3469		{ { 4, 0xA ^ 300 }, { 20, 0xA ^ 300 } },
   3470	},
   3471	{
   3472		"SPILL_FILL",
   3473		.u.insns = {
   3474			BPF_STMT(BPF_LDX | BPF_LEN, 0),
   3475			BPF_STMT(BPF_LD | BPF_IMM, 2),
   3476			BPF_STMT(BPF_ALU | BPF_RSH, 1),
   3477			BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
   3478			BPF_STMT(BPF_ST, 1), /* M1 = 1 ^ len */
   3479			BPF_STMT(BPF_ALU | BPF_XOR | BPF_K, 0x80000000),
   3480			BPF_STMT(BPF_ST, 2), /* M2 = 1 ^ len ^ 0x80000000 */
   3481			BPF_STMT(BPF_STX, 15), /* M3 = len */
   3482			BPF_STMT(BPF_LDX | BPF_MEM, 1),
   3483			BPF_STMT(BPF_LD | BPF_MEM, 2),
   3484			BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
   3485			BPF_STMT(BPF_LDX | BPF_MEM, 15),
   3486			BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
   3487			BPF_STMT(BPF_RET | BPF_A, 0)
   3488		},
   3489		CLASSIC,
   3490		{ },
   3491		{ { 1, 0x80000001 }, { 2, 0x80000002 }, { 60, 0x80000000 ^ 60 } }
   3492	},
   3493	{
   3494		"JEQ",
   3495		.u.insns = {
   3496			BPF_STMT(BPF_LDX | BPF_LEN, 0),
   3497			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
   3498			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 0, 1),
   3499			BPF_STMT(BPF_RET | BPF_K, 1),
   3500			BPF_STMT(BPF_RET | BPF_K, MAX_K)
   3501		},
   3502		CLASSIC,
   3503		{ 3, 3, 3, 3, 3 },
   3504		{ { 1, 0 }, { 3, 1 }, { 4, MAX_K } },
   3505	},
   3506	{
   3507		"JGT",
   3508		.u.insns = {
   3509			BPF_STMT(BPF_LDX | BPF_LEN, 0),
   3510			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
   3511			BPF_JUMP(BPF_JMP | BPF_JGT | BPF_X, 0, 0, 1),
   3512			BPF_STMT(BPF_RET | BPF_K, 1),
   3513			BPF_STMT(BPF_RET | BPF_K, MAX_K)
   3514		},
   3515		CLASSIC,
   3516		{ 4, 4, 4, 3, 3 },
   3517		{ { 2, 0 }, { 3, 1 }, { 4, MAX_K } },
   3518	},
   3519	{
   3520		"JGE (jt 0), test 1",
   3521		.u.insns = {
   3522			BPF_STMT(BPF_LDX | BPF_LEN, 0),
   3523			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
   3524			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
   3525			BPF_STMT(BPF_RET | BPF_K, 1),
   3526			BPF_STMT(BPF_RET | BPF_K, MAX_K)
   3527		},
   3528		CLASSIC,
   3529		{ 4, 4, 4, 3, 3 },
   3530		{ { 2, 0 }, { 3, 1 }, { 4, 1 } },
   3531	},
   3532	{
   3533		"JGE (jt 0), test 2",
   3534		.u.insns = {
   3535			BPF_STMT(BPF_LDX | BPF_LEN, 0),
   3536			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
   3537			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
   3538			BPF_STMT(BPF_RET | BPF_K, 1),
   3539			BPF_STMT(BPF_RET | BPF_K, MAX_K)
   3540		},
   3541		CLASSIC,
   3542		{ 4, 4, 5, 3, 3 },
   3543		{ { 4, 1 }, { 5, 1 }, { 6, MAX_K } },
   3544	},
   3545	{
   3546		"JGE",
   3547		.u.insns = {
   3548			BPF_STMT(BPF_LDX | BPF_LEN, 0),
   3549			BPF_STMT(BPF_LD | BPF_B | BPF_IND, MAX_K),
   3550			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 1, 1, 0),
   3551			BPF_STMT(BPF_RET | BPF_K, 10),
   3552			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 2, 1, 0),
   3553			BPF_STMT(BPF_RET | BPF_K, 20),
   3554			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 3, 1, 0),
   3555			BPF_STMT(BPF_RET | BPF_K, 30),
   3556			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 4, 1, 0),
   3557			BPF_STMT(BPF_RET | BPF_K, 40),
   3558			BPF_STMT(BPF_RET | BPF_K, MAX_K)
   3559		},
   3560		CLASSIC,
   3561		{ 1, 2, 3, 4, 5 },
   3562		{ { 1, 20 }, { 3, 40 }, { 5, MAX_K } },
   3563	},
   3564	{
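		/* The leading BPF_JA instructions only exercise unconditional
		 * jumps; the real work loads the last four packet bytes
		 * (X = len - 4, then a word load at X) and runs them through a
		 * chain of JSET bit tests, so the result depends on the
		 * subtest's data length.
		 */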
   3565		"JSET",
   3566		.u.insns = {
   3567			BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
   3568			BPF_JUMP(BPF_JMP | BPF_JA, 1, 1, 1),
   3569			BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
   3570			BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
   3571			BPF_STMT(BPF_LDX | BPF_LEN, 0),
   3572			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   3573			BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, 4),
   3574			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   3575			BPF_STMT(BPF_LD | BPF_W | BPF_IND, 0),
   3576			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 1, 0, 1),
   3577			BPF_STMT(BPF_RET | BPF_K, 10),
   3578			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x80000000, 0, 1),
   3579			BPF_STMT(BPF_RET | BPF_K, 20),
   3580			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
   3581			BPF_STMT(BPF_RET | BPF_K, 30),
   3582			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
   3583			BPF_STMT(BPF_RET | BPF_K, 30),
   3584			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
   3585			BPF_STMT(BPF_RET | BPF_K, 30),
   3586			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
   3587			BPF_STMT(BPF_RET | BPF_K, 30),
   3588			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
   3589			BPF_STMT(BPF_RET | BPF_K, 30),
   3590			BPF_STMT(BPF_RET | BPF_K, MAX_K)
   3591		},
   3592		CLASSIC,
   3593		{ 0, 0xAA, 0x55, 1 },
   3594		{ { 4, 10 }, { 5, 20 }, { 6, MAX_K } },
   3595	},
   3596	{
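		/* Classic filter as generated by "tcpdump port 22": it matches
		 * TCP/UDP/SCTP over IPv4 or IPv6 with source or destination
		 * port 22.  The 10- and 30-byte subtests truncate the packet so
		 * the loads run past the data and the filter returns 0; the
		 * 100-byte run matches the destination port and returns 0xffff.
		 */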
   3597		"tcpdump port 22",
   3598		.u.insns = {
   3599			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
   3600			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 0, 8), /* IPv6 */
   3601			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 20),
   3602			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
   3603			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
   3604			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 17),
   3605			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 54),
   3606			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 14, 0),
   3607			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 56),
   3608			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 12, 13),
   3609			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0800, 0, 12), /* IPv4 */
   3610			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
   3611			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
   3612			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
   3613			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 8),
   3614			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
   3615			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 6, 0),
   3616			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
   3617			BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
   3618			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
   3619			BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
   3620			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 1),
   3621			BPF_STMT(BPF_RET | BPF_K, 0xffff),
   3622			BPF_STMT(BPF_RET | BPF_K, 0),
   3623		},
   3624		CLASSIC,
    3625		/* 3c:07:54:43:e5:76 > 10:bf:48:d6:43:d6, ethertype IPv4 (0x0800)
   3626		 * length 114: 10.1.1.149.49700 > 10.1.2.10.22: Flags [P.],
   3627		 * seq 1305692979:1305693027, ack 3650467037, win 65535,
   3628		 * options [nop,nop,TS val 2502645400 ecr 3971138], length 48
   3629		 */
   3630		{ 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
   3631		  0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
   3632		  0x08, 0x00,
   3633		  0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
   3634		  0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
   3635		  0x0a, 0x01, 0x01, 0x95, /* ip src */
   3636		  0x0a, 0x01, 0x02, 0x0a, /* ip dst */
   3637		  0xc2, 0x24,
   3638		  0x00, 0x16 /* dst port */ },
   3639		{ { 10, 0 }, { 30, 0 }, { 100, 65535 } },
   3640	},
   3641	{
   3642		"tcpdump complex",
   3643		.u.insns = {
   3644			/* tcpdump -nei eth0 'tcp port 22 and (((ip[2:2] -
   3645			 * ((ip[0]&0xf)<<2)) - ((tcp[12]&0xf0)>>2)) != 0) and
   3646			 * (len > 115 or len < 30000000000)' -d
   3647			 */
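			/* Note: 30000000000 does not fit in 32 bits and wraps to
			 * 0xfc23ac00 (30000000000 mod 2^32), which is why that
			 * immediate appears in the JGE below.
			 */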
   3648			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
   3649			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 30, 0),
   3650			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x800, 0, 29),
   3651			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
   3652			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 0, 27),
   3653			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
   3654			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 25, 0),
   3655			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
   3656			BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
   3657			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
   3658			BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
   3659			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 20),
   3660			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 16),
   3661			BPF_STMT(BPF_ST, 1),
   3662			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 14),
   3663			BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf),
   3664			BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 2),
   3665			BPF_STMT(BPF_MISC | BPF_TAX, 0x5), /* libpcap emits K on TAX */
   3666			BPF_STMT(BPF_LD | BPF_MEM, 1),
   3667			BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
   3668			BPF_STMT(BPF_ST, 5),
   3669			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
   3670			BPF_STMT(BPF_LD | BPF_B | BPF_IND, 26),
   3671			BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
   3672			BPF_STMT(BPF_ALU | BPF_RSH | BPF_K, 2),
   3673			BPF_STMT(BPF_MISC | BPF_TAX, 0x9), /* libpcap emits K on TAX */
   3674			BPF_STMT(BPF_LD | BPF_MEM, 5),
   3675			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 4, 0),
   3676			BPF_STMT(BPF_LD | BPF_LEN, 0),
   3677			BPF_JUMP(BPF_JMP | BPF_JGT | BPF_K, 0x73, 1, 0),
   3678			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 0xfc23ac00, 1, 0),
   3679			BPF_STMT(BPF_RET | BPF_K, 0xffff),
   3680			BPF_STMT(BPF_RET | BPF_K, 0),
   3681		},
   3682		CLASSIC,
   3683		{ 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
   3684		  0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
   3685		  0x08, 0x00,
   3686		  0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
   3687		  0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
   3688		  0x0a, 0x01, 0x01, 0x95, /* ip src */
   3689		  0x0a, 0x01, 0x02, 0x0a, /* ip dst */
   3690		  0xc2, 0x24,
   3691		  0x00, 0x16 /* dst port */ },
   3692		{ { 10, 0 }, { 30, 0 }, { 100, 65535 } },
   3693	},
   3694	{
   3695		"RET_A",
   3696		.u.insns = {
   3697			/* check that uninitialized X and A contain zeros */
   3698			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   3699			BPF_STMT(BPF_RET | BPF_A, 0)
   3700		},
   3701		CLASSIC,
   3702		{ },
   3703		{ {1, 0}, {2, 0} },
   3704	},
   3705	{
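		/* R1 = ((1 + 2) - 3 - 1) * 3 = -3, returned as the 32-bit
		 * value 0xfffffffd.
		 */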
   3706		"INT: ADD trivial",
   3707		.u.insns_int = {
   3708			BPF_ALU64_IMM(BPF_MOV, R1, 1),
   3709			BPF_ALU64_IMM(BPF_ADD, R1, 2),
   3710			BPF_ALU64_IMM(BPF_MOV, R2, 3),
   3711			BPF_ALU64_REG(BPF_SUB, R1, R2),
   3712			BPF_ALU64_IMM(BPF_ADD, R1, -1),
   3713			BPF_ALU64_IMM(BPF_MUL, R1, 3),
   3714			BPF_ALU64_REG(BPF_MOV, R0, R1),
   3715			BPF_EXIT_INSN(),
   3716		},
   3717		INTERNAL,
   3718		{ },
   3719		{ { 0, 0xfffffffd } }
   3720	},
   3721	{
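		/* 64-bit multiply: (-1) * 3 = -3.  BPF_JMP_IMM sign-extends
		 * the 0xfffffffd immediate, so the 64-bit comparison matches
		 * and the test returns 1.
		 */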
   3722		"INT: MUL_X",
   3723		.u.insns_int = {
   3724			BPF_ALU64_IMM(BPF_MOV, R0, -1),
   3725			BPF_ALU64_IMM(BPF_MOV, R1, -1),
   3726			BPF_ALU64_IMM(BPF_MOV, R2, 3),
   3727			BPF_ALU64_REG(BPF_MUL, R1, R2),
   3728			BPF_JMP_IMM(BPF_JEQ, R1, 0xfffffffd, 1),
   3729			BPF_EXIT_INSN(),
   3730			BPF_ALU64_IMM(BPF_MOV, R0, 1),
   3731			BPF_EXIT_INSN(),
   3732		},
   3733		INTERNAL,
   3734		{ },
   3735		{ { 0, 1 } }
   3736	},
   3737	{
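		/* The 32-bit MOVs zero-extend, so R1 = 0xffffffff.  The 64-bit
		 * multiply by 3 yields 0x2fffffffd and the right shift by 8
		 * leaves 0x2ffffff.
		 */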
   3738		"INT: MUL_X2",
   3739		.u.insns_int = {
   3740			BPF_ALU32_IMM(BPF_MOV, R0, -1),
   3741			BPF_ALU32_IMM(BPF_MOV, R1, -1),
   3742			BPF_ALU32_IMM(BPF_MOV, R2, 3),
   3743			BPF_ALU64_REG(BPF_MUL, R1, R2),
   3744			BPF_ALU64_IMM(BPF_RSH, R1, 8),
   3745			BPF_JMP_IMM(BPF_JEQ, R1, 0x2ffffff, 1),
   3746			BPF_EXIT_INSN(),
   3747			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   3748			BPF_EXIT_INSN(),
   3749		},
   3750		INTERNAL,
   3751		{ },
   3752		{ { 0, 1 } }
   3753	},
   3754	{
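		/* The 32-bit multiply truncates 0xffffffff * 3 to 0xfffffffd
		 * and zero-extends it, so shifting right by 8 leaves 0xffffff.
		 */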
   3755		"INT: MUL32_X",
   3756		.u.insns_int = {
   3757			BPF_ALU32_IMM(BPF_MOV, R0, -1),
   3758			BPF_ALU64_IMM(BPF_MOV, R1, -1),
   3759			BPF_ALU32_IMM(BPF_MOV, R2, 3),
   3760			BPF_ALU32_REG(BPF_MUL, R1, R2),
   3761			BPF_ALU64_IMM(BPF_RSH, R1, 8),
   3762			BPF_JMP_IMM(BPF_JEQ, R1, 0xffffff, 1),
   3763			BPF_EXIT_INSN(),
   3764			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   3765			BPF_EXIT_INSN(),
   3766		},
   3767		INTERNAL,
   3768		{ },
   3769		{ { 0, 1 } }
   3770	},
   3771	{
   3772		/* Have to test all register combinations, since
   3773		 * JITing of different registers will produce
   3774		 * different asm code.
   3775		 */
   3776		"INT: ADD 64-bit",
   3777		.u.insns_int = {
   3778			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   3779			BPF_ALU64_IMM(BPF_MOV, R1, 1),
   3780			BPF_ALU64_IMM(BPF_MOV, R2, 2),
   3781			BPF_ALU64_IMM(BPF_MOV, R3, 3),
   3782			BPF_ALU64_IMM(BPF_MOV, R4, 4),
   3783			BPF_ALU64_IMM(BPF_MOV, R5, 5),
   3784			BPF_ALU64_IMM(BPF_MOV, R6, 6),
   3785			BPF_ALU64_IMM(BPF_MOV, R7, 7),
   3786			BPF_ALU64_IMM(BPF_MOV, R8, 8),
   3787			BPF_ALU64_IMM(BPF_MOV, R9, 9),
   3788			BPF_ALU64_IMM(BPF_ADD, R0, 20),
   3789			BPF_ALU64_IMM(BPF_ADD, R1, 20),
   3790			BPF_ALU64_IMM(BPF_ADD, R2, 20),
   3791			BPF_ALU64_IMM(BPF_ADD, R3, 20),
   3792			BPF_ALU64_IMM(BPF_ADD, R4, 20),
   3793			BPF_ALU64_IMM(BPF_ADD, R5, 20),
   3794			BPF_ALU64_IMM(BPF_ADD, R6, 20),
   3795			BPF_ALU64_IMM(BPF_ADD, R7, 20),
   3796			BPF_ALU64_IMM(BPF_ADD, R8, 20),
   3797			BPF_ALU64_IMM(BPF_ADD, R9, 20),
   3798			BPF_ALU64_IMM(BPF_SUB, R0, 10),
   3799			BPF_ALU64_IMM(BPF_SUB, R1, 10),
   3800			BPF_ALU64_IMM(BPF_SUB, R2, 10),
   3801			BPF_ALU64_IMM(BPF_SUB, R3, 10),
   3802			BPF_ALU64_IMM(BPF_SUB, R4, 10),
   3803			BPF_ALU64_IMM(BPF_SUB, R5, 10),
   3804			BPF_ALU64_IMM(BPF_SUB, R6, 10),
   3805			BPF_ALU64_IMM(BPF_SUB, R7, 10),
   3806			BPF_ALU64_IMM(BPF_SUB, R8, 10),
   3807			BPF_ALU64_IMM(BPF_SUB, R9, 10),
   3808			BPF_ALU64_REG(BPF_ADD, R0, R0),
   3809			BPF_ALU64_REG(BPF_ADD, R0, R1),
   3810			BPF_ALU64_REG(BPF_ADD, R0, R2),
   3811			BPF_ALU64_REG(BPF_ADD, R0, R3),
   3812			BPF_ALU64_REG(BPF_ADD, R0, R4),
   3813			BPF_ALU64_REG(BPF_ADD, R0, R5),
   3814			BPF_ALU64_REG(BPF_ADD, R0, R6),
   3815			BPF_ALU64_REG(BPF_ADD, R0, R7),
   3816			BPF_ALU64_REG(BPF_ADD, R0, R8),
   3817			BPF_ALU64_REG(BPF_ADD, R0, R9), /* R0 == 155 */
   3818			BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
   3819			BPF_EXIT_INSN(),
   3820			BPF_ALU64_REG(BPF_ADD, R1, R0),
   3821			BPF_ALU64_REG(BPF_ADD, R1, R1),
   3822			BPF_ALU64_REG(BPF_ADD, R1, R2),
   3823			BPF_ALU64_REG(BPF_ADD, R1, R3),
   3824			BPF_ALU64_REG(BPF_ADD, R1, R4),
   3825			BPF_ALU64_REG(BPF_ADD, R1, R5),
   3826			BPF_ALU64_REG(BPF_ADD, R1, R6),
   3827			BPF_ALU64_REG(BPF_ADD, R1, R7),
   3828			BPF_ALU64_REG(BPF_ADD, R1, R8),
   3829			BPF_ALU64_REG(BPF_ADD, R1, R9), /* R1 == 456 */
   3830			BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
   3831			BPF_EXIT_INSN(),
   3832			BPF_ALU64_REG(BPF_ADD, R2, R0),
   3833			BPF_ALU64_REG(BPF_ADD, R2, R1),
   3834			BPF_ALU64_REG(BPF_ADD, R2, R2),
   3835			BPF_ALU64_REG(BPF_ADD, R2, R3),
   3836			BPF_ALU64_REG(BPF_ADD, R2, R4),
   3837			BPF_ALU64_REG(BPF_ADD, R2, R5),
   3838			BPF_ALU64_REG(BPF_ADD, R2, R6),
   3839			BPF_ALU64_REG(BPF_ADD, R2, R7),
   3840			BPF_ALU64_REG(BPF_ADD, R2, R8),
   3841			BPF_ALU64_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
   3842			BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
   3843			BPF_EXIT_INSN(),
   3844			BPF_ALU64_REG(BPF_ADD, R3, R0),
   3845			BPF_ALU64_REG(BPF_ADD, R3, R1),
   3846			BPF_ALU64_REG(BPF_ADD, R3, R2),
   3847			BPF_ALU64_REG(BPF_ADD, R3, R3),
   3848			BPF_ALU64_REG(BPF_ADD, R3, R4),
   3849			BPF_ALU64_REG(BPF_ADD, R3, R5),
   3850			BPF_ALU64_REG(BPF_ADD, R3, R6),
   3851			BPF_ALU64_REG(BPF_ADD, R3, R7),
   3852			BPF_ALU64_REG(BPF_ADD, R3, R8),
   3853			BPF_ALU64_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
   3854			BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
   3855			BPF_EXIT_INSN(),
   3856			BPF_ALU64_REG(BPF_ADD, R4, R0),
   3857			BPF_ALU64_REG(BPF_ADD, R4, R1),
   3858			BPF_ALU64_REG(BPF_ADD, R4, R2),
   3859			BPF_ALU64_REG(BPF_ADD, R4, R3),
   3860			BPF_ALU64_REG(BPF_ADD, R4, R4),
   3861			BPF_ALU64_REG(BPF_ADD, R4, R5),
   3862			BPF_ALU64_REG(BPF_ADD, R4, R6),
   3863			BPF_ALU64_REG(BPF_ADD, R4, R7),
   3864			BPF_ALU64_REG(BPF_ADD, R4, R8),
   3865			BPF_ALU64_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
   3866			BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
   3867			BPF_EXIT_INSN(),
   3868			BPF_ALU64_REG(BPF_ADD, R5, R0),
   3869			BPF_ALU64_REG(BPF_ADD, R5, R1),
   3870			BPF_ALU64_REG(BPF_ADD, R5, R2),
   3871			BPF_ALU64_REG(BPF_ADD, R5, R3),
   3872			BPF_ALU64_REG(BPF_ADD, R5, R4),
   3873			BPF_ALU64_REG(BPF_ADD, R5, R5),
   3874			BPF_ALU64_REG(BPF_ADD, R5, R6),
   3875			BPF_ALU64_REG(BPF_ADD, R5, R7),
   3876			BPF_ALU64_REG(BPF_ADD, R5, R8),
   3877			BPF_ALU64_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
   3878			BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
   3879			BPF_EXIT_INSN(),
   3880			BPF_ALU64_REG(BPF_ADD, R6, R0),
   3881			BPF_ALU64_REG(BPF_ADD, R6, R1),
   3882			BPF_ALU64_REG(BPF_ADD, R6, R2),
   3883			BPF_ALU64_REG(BPF_ADD, R6, R3),
   3884			BPF_ALU64_REG(BPF_ADD, R6, R4),
   3885			BPF_ALU64_REG(BPF_ADD, R6, R5),
   3886			BPF_ALU64_REG(BPF_ADD, R6, R6),
   3887			BPF_ALU64_REG(BPF_ADD, R6, R7),
   3888			BPF_ALU64_REG(BPF_ADD, R6, R8),
   3889			BPF_ALU64_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
   3890			BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
   3891			BPF_EXIT_INSN(),
   3892			BPF_ALU64_REG(BPF_ADD, R7, R0),
   3893			BPF_ALU64_REG(BPF_ADD, R7, R1),
   3894			BPF_ALU64_REG(BPF_ADD, R7, R2),
   3895			BPF_ALU64_REG(BPF_ADD, R7, R3),
   3896			BPF_ALU64_REG(BPF_ADD, R7, R4),
   3897			BPF_ALU64_REG(BPF_ADD, R7, R5),
   3898			BPF_ALU64_REG(BPF_ADD, R7, R6),
   3899			BPF_ALU64_REG(BPF_ADD, R7, R7),
   3900			BPF_ALU64_REG(BPF_ADD, R7, R8),
   3901			BPF_ALU64_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
   3902			BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
   3903			BPF_EXIT_INSN(),
   3904			BPF_ALU64_REG(BPF_ADD, R8, R0),
   3905			BPF_ALU64_REG(BPF_ADD, R8, R1),
   3906			BPF_ALU64_REG(BPF_ADD, R8, R2),
   3907			BPF_ALU64_REG(BPF_ADD, R8, R3),
   3908			BPF_ALU64_REG(BPF_ADD, R8, R4),
   3909			BPF_ALU64_REG(BPF_ADD, R8, R5),
   3910			BPF_ALU64_REG(BPF_ADD, R8, R6),
   3911			BPF_ALU64_REG(BPF_ADD, R8, R7),
   3912			BPF_ALU64_REG(BPF_ADD, R8, R8),
   3913			BPF_ALU64_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
   3914			BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
   3915			BPF_EXIT_INSN(),
   3916			BPF_ALU64_REG(BPF_ADD, R9, R0),
   3917			BPF_ALU64_REG(BPF_ADD, R9, R1),
   3918			BPF_ALU64_REG(BPF_ADD, R9, R2),
   3919			BPF_ALU64_REG(BPF_ADD, R9, R3),
   3920			BPF_ALU64_REG(BPF_ADD, R9, R4),
   3921			BPF_ALU64_REG(BPF_ADD, R9, R5),
   3922			BPF_ALU64_REG(BPF_ADD, R9, R6),
   3923			BPF_ALU64_REG(BPF_ADD, R9, R7),
   3924			BPF_ALU64_REG(BPF_ADD, R9, R8),
   3925			BPF_ALU64_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
   3926			BPF_ALU64_REG(BPF_MOV, R0, R9),
   3927			BPF_EXIT_INSN(),
   3928		},
   3929		INTERNAL,
   3930		{ },
   3931		{ { 0, 2957380 } }
   3932	},
   3933	{
   3934		"INT: ADD 32-bit",
   3935		.u.insns_int = {
   3936			BPF_ALU32_IMM(BPF_MOV, R0, 20),
   3937			BPF_ALU32_IMM(BPF_MOV, R1, 1),
   3938			BPF_ALU32_IMM(BPF_MOV, R2, 2),
   3939			BPF_ALU32_IMM(BPF_MOV, R3, 3),
   3940			BPF_ALU32_IMM(BPF_MOV, R4, 4),
   3941			BPF_ALU32_IMM(BPF_MOV, R5, 5),
   3942			BPF_ALU32_IMM(BPF_MOV, R6, 6),
   3943			BPF_ALU32_IMM(BPF_MOV, R7, 7),
   3944			BPF_ALU32_IMM(BPF_MOV, R8, 8),
   3945			BPF_ALU32_IMM(BPF_MOV, R9, 9),
   3946			BPF_ALU64_IMM(BPF_ADD, R1, 10),
   3947			BPF_ALU64_IMM(BPF_ADD, R2, 10),
   3948			BPF_ALU64_IMM(BPF_ADD, R3, 10),
   3949			BPF_ALU64_IMM(BPF_ADD, R4, 10),
   3950			BPF_ALU64_IMM(BPF_ADD, R5, 10),
   3951			BPF_ALU64_IMM(BPF_ADD, R6, 10),
   3952			BPF_ALU64_IMM(BPF_ADD, R7, 10),
   3953			BPF_ALU64_IMM(BPF_ADD, R8, 10),
   3954			BPF_ALU64_IMM(BPF_ADD, R9, 10),
   3955			BPF_ALU32_REG(BPF_ADD, R0, R1),
   3956			BPF_ALU32_REG(BPF_ADD, R0, R2),
   3957			BPF_ALU32_REG(BPF_ADD, R0, R3),
   3958			BPF_ALU32_REG(BPF_ADD, R0, R4),
   3959			BPF_ALU32_REG(BPF_ADD, R0, R5),
   3960			BPF_ALU32_REG(BPF_ADD, R0, R6),
   3961			BPF_ALU32_REG(BPF_ADD, R0, R7),
   3962			BPF_ALU32_REG(BPF_ADD, R0, R8),
   3963			BPF_ALU32_REG(BPF_ADD, R0, R9), /* R0 == 155 */
   3964			BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
   3965			BPF_EXIT_INSN(),
   3966			BPF_ALU32_REG(BPF_ADD, R1, R0),
   3967			BPF_ALU32_REG(BPF_ADD, R1, R1),
   3968			BPF_ALU32_REG(BPF_ADD, R1, R2),
   3969			BPF_ALU32_REG(BPF_ADD, R1, R3),
   3970			BPF_ALU32_REG(BPF_ADD, R1, R4),
   3971			BPF_ALU32_REG(BPF_ADD, R1, R5),
   3972			BPF_ALU32_REG(BPF_ADD, R1, R6),
   3973			BPF_ALU32_REG(BPF_ADD, R1, R7),
   3974			BPF_ALU32_REG(BPF_ADD, R1, R8),
   3975			BPF_ALU32_REG(BPF_ADD, R1, R9), /* R1 == 456 */
   3976			BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
   3977			BPF_EXIT_INSN(),
   3978			BPF_ALU32_REG(BPF_ADD, R2, R0),
   3979			BPF_ALU32_REG(BPF_ADD, R2, R1),
   3980			BPF_ALU32_REG(BPF_ADD, R2, R2),
   3981			BPF_ALU32_REG(BPF_ADD, R2, R3),
   3982			BPF_ALU32_REG(BPF_ADD, R2, R4),
   3983			BPF_ALU32_REG(BPF_ADD, R2, R5),
   3984			BPF_ALU32_REG(BPF_ADD, R2, R6),
   3985			BPF_ALU32_REG(BPF_ADD, R2, R7),
   3986			BPF_ALU32_REG(BPF_ADD, R2, R8),
   3987			BPF_ALU32_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
   3988			BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
   3989			BPF_EXIT_INSN(),
   3990			BPF_ALU32_REG(BPF_ADD, R3, R0),
   3991			BPF_ALU32_REG(BPF_ADD, R3, R1),
   3992			BPF_ALU32_REG(BPF_ADD, R3, R2),
   3993			BPF_ALU32_REG(BPF_ADD, R3, R3),
   3994			BPF_ALU32_REG(BPF_ADD, R3, R4),
   3995			BPF_ALU32_REG(BPF_ADD, R3, R5),
   3996			BPF_ALU32_REG(BPF_ADD, R3, R6),
   3997			BPF_ALU32_REG(BPF_ADD, R3, R7),
   3998			BPF_ALU32_REG(BPF_ADD, R3, R8),
   3999			BPF_ALU32_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
   4000			BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
   4001			BPF_EXIT_INSN(),
   4002			BPF_ALU32_REG(BPF_ADD, R4, R0),
   4003			BPF_ALU32_REG(BPF_ADD, R4, R1),
   4004			BPF_ALU32_REG(BPF_ADD, R4, R2),
   4005			BPF_ALU32_REG(BPF_ADD, R4, R3),
   4006			BPF_ALU32_REG(BPF_ADD, R4, R4),
   4007			BPF_ALU32_REG(BPF_ADD, R4, R5),
   4008			BPF_ALU32_REG(BPF_ADD, R4, R6),
   4009			BPF_ALU32_REG(BPF_ADD, R4, R7),
   4010			BPF_ALU32_REG(BPF_ADD, R4, R8),
   4011			BPF_ALU32_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
   4012			BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
   4013			BPF_EXIT_INSN(),
   4014			BPF_ALU32_REG(BPF_ADD, R5, R0),
   4015			BPF_ALU32_REG(BPF_ADD, R5, R1),
   4016			BPF_ALU32_REG(BPF_ADD, R5, R2),
   4017			BPF_ALU32_REG(BPF_ADD, R5, R3),
   4018			BPF_ALU32_REG(BPF_ADD, R5, R4),
   4019			BPF_ALU32_REG(BPF_ADD, R5, R5),
   4020			BPF_ALU32_REG(BPF_ADD, R5, R6),
   4021			BPF_ALU32_REG(BPF_ADD, R5, R7),
   4022			BPF_ALU32_REG(BPF_ADD, R5, R8),
   4023			BPF_ALU32_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
   4024			BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
   4025			BPF_EXIT_INSN(),
   4026			BPF_ALU32_REG(BPF_ADD, R6, R0),
   4027			BPF_ALU32_REG(BPF_ADD, R6, R1),
   4028			BPF_ALU32_REG(BPF_ADD, R6, R2),
   4029			BPF_ALU32_REG(BPF_ADD, R6, R3),
   4030			BPF_ALU32_REG(BPF_ADD, R6, R4),
   4031			BPF_ALU32_REG(BPF_ADD, R6, R5),
   4032			BPF_ALU32_REG(BPF_ADD, R6, R6),
   4033			BPF_ALU32_REG(BPF_ADD, R6, R7),
   4034			BPF_ALU32_REG(BPF_ADD, R6, R8),
   4035			BPF_ALU32_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
   4036			BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
   4037			BPF_EXIT_INSN(),
   4038			BPF_ALU32_REG(BPF_ADD, R7, R0),
   4039			BPF_ALU32_REG(BPF_ADD, R7, R1),
   4040			BPF_ALU32_REG(BPF_ADD, R7, R2),
   4041			BPF_ALU32_REG(BPF_ADD, R7, R3),
   4042			BPF_ALU32_REG(BPF_ADD, R7, R4),
   4043			BPF_ALU32_REG(BPF_ADD, R7, R5),
   4044			BPF_ALU32_REG(BPF_ADD, R7, R6),
   4045			BPF_ALU32_REG(BPF_ADD, R7, R7),
   4046			BPF_ALU32_REG(BPF_ADD, R7, R8),
   4047			BPF_ALU32_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
   4048			BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
   4049			BPF_EXIT_INSN(),
   4050			BPF_ALU32_REG(BPF_ADD, R8, R0),
   4051			BPF_ALU32_REG(BPF_ADD, R8, R1),
   4052			BPF_ALU32_REG(BPF_ADD, R8, R2),
   4053			BPF_ALU32_REG(BPF_ADD, R8, R3),
   4054			BPF_ALU32_REG(BPF_ADD, R8, R4),
   4055			BPF_ALU32_REG(BPF_ADD, R8, R5),
   4056			BPF_ALU32_REG(BPF_ADD, R8, R6),
   4057			BPF_ALU32_REG(BPF_ADD, R8, R7),
   4058			BPF_ALU32_REG(BPF_ADD, R8, R8),
   4059			BPF_ALU32_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
   4060			BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
   4061			BPF_EXIT_INSN(),
   4062			BPF_ALU32_REG(BPF_ADD, R9, R0),
   4063			BPF_ALU32_REG(BPF_ADD, R9, R1),
   4064			BPF_ALU32_REG(BPF_ADD, R9, R2),
   4065			BPF_ALU32_REG(BPF_ADD, R9, R3),
   4066			BPF_ALU32_REG(BPF_ADD, R9, R4),
   4067			BPF_ALU32_REG(BPF_ADD, R9, R5),
   4068			BPF_ALU32_REG(BPF_ADD, R9, R6),
   4069			BPF_ALU32_REG(BPF_ADD, R9, R7),
   4070			BPF_ALU32_REG(BPF_ADD, R9, R8),
   4071			BPF_ALU32_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
   4072			BPF_ALU32_REG(BPF_MOV, R0, R9),
   4073			BPF_EXIT_INSN(),
   4074		},
   4075		INTERNAL,
   4076		{ },
   4077		{ { 0, 2957380 } }
   4078	},
   4079	{	/* Mainly checking JIT here. */
   4080		"INT: SUB",
   4081		.u.insns_int = {
   4082			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   4083			BPF_ALU64_IMM(BPF_MOV, R1, 1),
   4084			BPF_ALU64_IMM(BPF_MOV, R2, 2),
   4085			BPF_ALU64_IMM(BPF_MOV, R3, 3),
   4086			BPF_ALU64_IMM(BPF_MOV, R4, 4),
   4087			BPF_ALU64_IMM(BPF_MOV, R5, 5),
   4088			BPF_ALU64_IMM(BPF_MOV, R6, 6),
   4089			BPF_ALU64_IMM(BPF_MOV, R7, 7),
   4090			BPF_ALU64_IMM(BPF_MOV, R8, 8),
   4091			BPF_ALU64_IMM(BPF_MOV, R9, 9),
   4092			BPF_ALU64_REG(BPF_SUB, R0, R0),
   4093			BPF_ALU64_REG(BPF_SUB, R0, R1),
   4094			BPF_ALU64_REG(BPF_SUB, R0, R2),
   4095			BPF_ALU64_REG(BPF_SUB, R0, R3),
   4096			BPF_ALU64_REG(BPF_SUB, R0, R4),
   4097			BPF_ALU64_REG(BPF_SUB, R0, R5),
   4098			BPF_ALU64_REG(BPF_SUB, R0, R6),
   4099			BPF_ALU64_REG(BPF_SUB, R0, R7),
   4100			BPF_ALU64_REG(BPF_SUB, R0, R8),
   4101			BPF_ALU64_REG(BPF_SUB, R0, R9),
   4102			BPF_ALU64_IMM(BPF_SUB, R0, 10),
   4103			BPF_JMP_IMM(BPF_JEQ, R0, -55, 1),
   4104			BPF_EXIT_INSN(),
   4105			BPF_ALU64_REG(BPF_SUB, R1, R0),
   4106			BPF_ALU64_REG(BPF_SUB, R1, R2),
   4107			BPF_ALU64_REG(BPF_SUB, R1, R3),
   4108			BPF_ALU64_REG(BPF_SUB, R1, R4),
   4109			BPF_ALU64_REG(BPF_SUB, R1, R5),
   4110			BPF_ALU64_REG(BPF_SUB, R1, R6),
   4111			BPF_ALU64_REG(BPF_SUB, R1, R7),
   4112			BPF_ALU64_REG(BPF_SUB, R1, R8),
   4113			BPF_ALU64_REG(BPF_SUB, R1, R9),
   4114			BPF_ALU64_IMM(BPF_SUB, R1, 10),
   4115			BPF_ALU64_REG(BPF_SUB, R2, R0),
   4116			BPF_ALU64_REG(BPF_SUB, R2, R1),
   4117			BPF_ALU64_REG(BPF_SUB, R2, R3),
   4118			BPF_ALU64_REG(BPF_SUB, R2, R4),
   4119			BPF_ALU64_REG(BPF_SUB, R2, R5),
   4120			BPF_ALU64_REG(BPF_SUB, R2, R6),
   4121			BPF_ALU64_REG(BPF_SUB, R2, R7),
   4122			BPF_ALU64_REG(BPF_SUB, R2, R8),
   4123			BPF_ALU64_REG(BPF_SUB, R2, R9),
   4124			BPF_ALU64_IMM(BPF_SUB, R2, 10),
   4125			BPF_ALU64_REG(BPF_SUB, R3, R0),
   4126			BPF_ALU64_REG(BPF_SUB, R3, R1),
   4127			BPF_ALU64_REG(BPF_SUB, R3, R2),
   4128			BPF_ALU64_REG(BPF_SUB, R3, R4),
   4129			BPF_ALU64_REG(BPF_SUB, R3, R5),
   4130			BPF_ALU64_REG(BPF_SUB, R3, R6),
   4131			BPF_ALU64_REG(BPF_SUB, R3, R7),
   4132			BPF_ALU64_REG(BPF_SUB, R3, R8),
   4133			BPF_ALU64_REG(BPF_SUB, R3, R9),
   4134			BPF_ALU64_IMM(BPF_SUB, R3, 10),
   4135			BPF_ALU64_REG(BPF_SUB, R4, R0),
   4136			BPF_ALU64_REG(BPF_SUB, R4, R1),
   4137			BPF_ALU64_REG(BPF_SUB, R4, R2),
   4138			BPF_ALU64_REG(BPF_SUB, R4, R3),
   4139			BPF_ALU64_REG(BPF_SUB, R4, R5),
   4140			BPF_ALU64_REG(BPF_SUB, R4, R6),
   4141			BPF_ALU64_REG(BPF_SUB, R4, R7),
   4142			BPF_ALU64_REG(BPF_SUB, R4, R8),
   4143			BPF_ALU64_REG(BPF_SUB, R4, R9),
   4144			BPF_ALU64_IMM(BPF_SUB, R4, 10),
   4145			BPF_ALU64_REG(BPF_SUB, R5, R0),
   4146			BPF_ALU64_REG(BPF_SUB, R5, R1),
   4147			BPF_ALU64_REG(BPF_SUB, R5, R2),
   4148			BPF_ALU64_REG(BPF_SUB, R5, R3),
   4149			BPF_ALU64_REG(BPF_SUB, R5, R4),
   4150			BPF_ALU64_REG(BPF_SUB, R5, R6),
   4151			BPF_ALU64_REG(BPF_SUB, R5, R7),
   4152			BPF_ALU64_REG(BPF_SUB, R5, R8),
   4153			BPF_ALU64_REG(BPF_SUB, R5, R9),
   4154			BPF_ALU64_IMM(BPF_SUB, R5, 10),
   4155			BPF_ALU64_REG(BPF_SUB, R6, R0),
   4156			BPF_ALU64_REG(BPF_SUB, R6, R1),
   4157			BPF_ALU64_REG(BPF_SUB, R6, R2),
   4158			BPF_ALU64_REG(BPF_SUB, R6, R3),
   4159			BPF_ALU64_REG(BPF_SUB, R6, R4),
   4160			BPF_ALU64_REG(BPF_SUB, R6, R5),
   4161			BPF_ALU64_REG(BPF_SUB, R6, R7),
   4162			BPF_ALU64_REG(BPF_SUB, R6, R8),
   4163			BPF_ALU64_REG(BPF_SUB, R6, R9),
   4164			BPF_ALU64_IMM(BPF_SUB, R6, 10),
   4165			BPF_ALU64_REG(BPF_SUB, R7, R0),
   4166			BPF_ALU64_REG(BPF_SUB, R7, R1),
   4167			BPF_ALU64_REG(BPF_SUB, R7, R2),
   4168			BPF_ALU64_REG(BPF_SUB, R7, R3),
   4169			BPF_ALU64_REG(BPF_SUB, R7, R4),
   4170			BPF_ALU64_REG(BPF_SUB, R7, R5),
   4171			BPF_ALU64_REG(BPF_SUB, R7, R6),
   4172			BPF_ALU64_REG(BPF_SUB, R7, R8),
   4173			BPF_ALU64_REG(BPF_SUB, R7, R9),
   4174			BPF_ALU64_IMM(BPF_SUB, R7, 10),
   4175			BPF_ALU64_REG(BPF_SUB, R8, R0),
   4176			BPF_ALU64_REG(BPF_SUB, R8, R1),
   4177			BPF_ALU64_REG(BPF_SUB, R8, R2),
   4178			BPF_ALU64_REG(BPF_SUB, R8, R3),
   4179			BPF_ALU64_REG(BPF_SUB, R8, R4),
   4180			BPF_ALU64_REG(BPF_SUB, R8, R5),
   4181			BPF_ALU64_REG(BPF_SUB, R8, R6),
   4182			BPF_ALU64_REG(BPF_SUB, R8, R7),
   4183			BPF_ALU64_REG(BPF_SUB, R8, R9),
   4184			BPF_ALU64_IMM(BPF_SUB, R8, 10),
   4185			BPF_ALU64_REG(BPF_SUB, R9, R0),
   4186			BPF_ALU64_REG(BPF_SUB, R9, R1),
   4187			BPF_ALU64_REG(BPF_SUB, R9, R2),
   4188			BPF_ALU64_REG(BPF_SUB, R9, R3),
   4189			BPF_ALU64_REG(BPF_SUB, R9, R4),
   4190			BPF_ALU64_REG(BPF_SUB, R9, R5),
   4191			BPF_ALU64_REG(BPF_SUB, R9, R6),
   4192			BPF_ALU64_REG(BPF_SUB, R9, R7),
   4193			BPF_ALU64_REG(BPF_SUB, R9, R8),
   4194			BPF_ALU64_IMM(BPF_SUB, R9, 10),
   4195			BPF_ALU64_IMM(BPF_SUB, R0, 10),
   4196			BPF_ALU64_IMM(BPF_NEG, R0, 0),
   4197			BPF_ALU64_REG(BPF_SUB, R0, R1),
   4198			BPF_ALU64_REG(BPF_SUB, R0, R2),
   4199			BPF_ALU64_REG(BPF_SUB, R0, R3),
   4200			BPF_ALU64_REG(BPF_SUB, R0, R4),
   4201			BPF_ALU64_REG(BPF_SUB, R0, R5),
   4202			BPF_ALU64_REG(BPF_SUB, R0, R6),
   4203			BPF_ALU64_REG(BPF_SUB, R0, R7),
   4204			BPF_ALU64_REG(BPF_SUB, R0, R8),
   4205			BPF_ALU64_REG(BPF_SUB, R0, R9),
   4206			BPF_EXIT_INSN(),
   4207		},
   4208		INTERNAL,
   4209		{ },
   4210		{ { 0, 11 } }
   4211	},
   4212	{	/* Mainly checking JIT here. */
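		/* Verifies that both BPF_SUB r,r and BPF_XOR r,r work as
		 * zeroing idioms: each pair is compared for equality across
		 * all registers, interleaved with unrelated MOVs that must not
		 * disturb the result.
		 */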
   4213		"INT: XOR",
   4214		.u.insns_int = {
   4215			BPF_ALU64_REG(BPF_SUB, R0, R0),
   4216			BPF_ALU64_REG(BPF_XOR, R1, R1),
   4217			BPF_JMP_REG(BPF_JEQ, R0, R1, 1),
   4218			BPF_EXIT_INSN(),
   4219			BPF_ALU64_IMM(BPF_MOV, R0, 10),
   4220			BPF_ALU64_IMM(BPF_MOV, R1, -1),
   4221			BPF_ALU64_REG(BPF_SUB, R1, R1),
   4222			BPF_ALU64_REG(BPF_XOR, R2, R2),
   4223			BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
   4224			BPF_EXIT_INSN(),
   4225			BPF_ALU64_REG(BPF_SUB, R2, R2),
   4226			BPF_ALU64_REG(BPF_XOR, R3, R3),
   4227			BPF_ALU64_IMM(BPF_MOV, R0, 10),
   4228			BPF_ALU64_IMM(BPF_MOV, R1, -1),
   4229			BPF_JMP_REG(BPF_JEQ, R2, R3, 1),
   4230			BPF_EXIT_INSN(),
   4231			BPF_ALU64_REG(BPF_SUB, R3, R3),
   4232			BPF_ALU64_REG(BPF_XOR, R4, R4),
   4233			BPF_ALU64_IMM(BPF_MOV, R2, 1),
   4234			BPF_ALU64_IMM(BPF_MOV, R5, -1),
   4235			BPF_JMP_REG(BPF_JEQ, R3, R4, 1),
   4236			BPF_EXIT_INSN(),
   4237			BPF_ALU64_REG(BPF_SUB, R4, R4),
   4238			BPF_ALU64_REG(BPF_XOR, R5, R5),
   4239			BPF_ALU64_IMM(BPF_MOV, R3, 1),
   4240			BPF_ALU64_IMM(BPF_MOV, R7, -1),
   4241			BPF_JMP_REG(BPF_JEQ, R5, R4, 1),
   4242			BPF_EXIT_INSN(),
   4243			BPF_ALU64_IMM(BPF_MOV, R5, 1),
   4244			BPF_ALU64_REG(BPF_SUB, R5, R5),
   4245			BPF_ALU64_REG(BPF_XOR, R6, R6),
   4246			BPF_ALU64_IMM(BPF_MOV, R1, 1),
   4247			BPF_ALU64_IMM(BPF_MOV, R8, -1),
   4248			BPF_JMP_REG(BPF_JEQ, R5, R6, 1),
   4249			BPF_EXIT_INSN(),
   4250			BPF_ALU64_REG(BPF_SUB, R6, R6),
   4251			BPF_ALU64_REG(BPF_XOR, R7, R7),
   4252			BPF_JMP_REG(BPF_JEQ, R7, R6, 1),
   4253			BPF_EXIT_INSN(),
   4254			BPF_ALU64_REG(BPF_SUB, R7, R7),
   4255			BPF_ALU64_REG(BPF_XOR, R8, R8),
   4256			BPF_JMP_REG(BPF_JEQ, R7, R8, 1),
   4257			BPF_EXIT_INSN(),
   4258			BPF_ALU64_REG(BPF_SUB, R8, R8),
   4259			BPF_ALU64_REG(BPF_XOR, R9, R9),
   4260			BPF_JMP_REG(BPF_JEQ, R9, R8, 1),
   4261			BPF_EXIT_INSN(),
   4262			BPF_ALU64_REG(BPF_SUB, R9, R9),
   4263			BPF_ALU64_REG(BPF_XOR, R0, R0),
   4264			BPF_JMP_REG(BPF_JEQ, R9, R0, 1),
   4265			BPF_EXIT_INSN(),
   4266			BPF_ALU64_REG(BPF_SUB, R1, R1),
   4267			BPF_ALU64_REG(BPF_XOR, R0, R0),
   4268			BPF_JMP_REG(BPF_JEQ, R9, R0, 2),
   4269			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   4270			BPF_EXIT_INSN(),
   4271			BPF_ALU64_IMM(BPF_MOV, R0, 1),
   4272			BPF_EXIT_INSN(),
   4273		},
   4274		INTERNAL,
   4275		{ },
   4276		{ { 0, 1 } }
   4277	},
   4278	{	/* Mainly checking JIT here. */
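		/* R0 = 11 * 11 * (2 * 3 * ... * 9) * 10 = 439084800.  R1 then
		 * accumulates a product that overflows 32 bits, and the test
		 * checks its upper 32 bits (0x5a924) and sign-extended lower
		 * 32 bits (0xebb90000) separately.
		 */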
   4279		"INT: MUL",
   4280		.u.insns_int = {
   4281			BPF_ALU64_IMM(BPF_MOV, R0, 11),
   4282			BPF_ALU64_IMM(BPF_MOV, R1, 1),
   4283			BPF_ALU64_IMM(BPF_MOV, R2, 2),
   4284			BPF_ALU64_IMM(BPF_MOV, R3, 3),
   4285			BPF_ALU64_IMM(BPF_MOV, R4, 4),
   4286			BPF_ALU64_IMM(BPF_MOV, R5, 5),
   4287			BPF_ALU64_IMM(BPF_MOV, R6, 6),
   4288			BPF_ALU64_IMM(BPF_MOV, R7, 7),
   4289			BPF_ALU64_IMM(BPF_MOV, R8, 8),
   4290			BPF_ALU64_IMM(BPF_MOV, R9, 9),
   4291			BPF_ALU64_REG(BPF_MUL, R0, R0),
   4292			BPF_ALU64_REG(BPF_MUL, R0, R1),
   4293			BPF_ALU64_REG(BPF_MUL, R0, R2),
   4294			BPF_ALU64_REG(BPF_MUL, R0, R3),
   4295			BPF_ALU64_REG(BPF_MUL, R0, R4),
   4296			BPF_ALU64_REG(BPF_MUL, R0, R5),
   4297			BPF_ALU64_REG(BPF_MUL, R0, R6),
   4298			BPF_ALU64_REG(BPF_MUL, R0, R7),
   4299			BPF_ALU64_REG(BPF_MUL, R0, R8),
   4300			BPF_ALU64_REG(BPF_MUL, R0, R9),
   4301			BPF_ALU64_IMM(BPF_MUL, R0, 10),
   4302			BPF_JMP_IMM(BPF_JEQ, R0, 439084800, 1),
   4303			BPF_EXIT_INSN(),
   4304			BPF_ALU64_REG(BPF_MUL, R1, R0),
   4305			BPF_ALU64_REG(BPF_MUL, R1, R2),
   4306			BPF_ALU64_REG(BPF_MUL, R1, R3),
   4307			BPF_ALU64_REG(BPF_MUL, R1, R4),
   4308			BPF_ALU64_REG(BPF_MUL, R1, R5),
   4309			BPF_ALU64_REG(BPF_MUL, R1, R6),
   4310			BPF_ALU64_REG(BPF_MUL, R1, R7),
   4311			BPF_ALU64_REG(BPF_MUL, R1, R8),
   4312			BPF_ALU64_REG(BPF_MUL, R1, R9),
   4313			BPF_ALU64_IMM(BPF_MUL, R1, 10),
   4314			BPF_ALU64_REG(BPF_MOV, R2, R1),
   4315			BPF_ALU64_IMM(BPF_RSH, R2, 32),
   4316			BPF_JMP_IMM(BPF_JEQ, R2, 0x5a924, 1),
   4317			BPF_EXIT_INSN(),
   4318			BPF_ALU64_IMM(BPF_LSH, R1, 32),
   4319			BPF_ALU64_IMM(BPF_ARSH, R1, 32),
   4320			BPF_JMP_IMM(BPF_JEQ, R1, 0xebb90000, 1),
   4321			BPF_EXIT_INSN(),
   4322			BPF_ALU64_REG(BPF_MUL, R2, R0),
   4323			BPF_ALU64_REG(BPF_MUL, R2, R1),
   4324			BPF_ALU64_REG(BPF_MUL, R2, R3),
   4325			BPF_ALU64_REG(BPF_MUL, R2, R4),
   4326			BPF_ALU64_REG(BPF_MUL, R2, R5),
   4327			BPF_ALU64_REG(BPF_MUL, R2, R6),
   4328			BPF_ALU64_REG(BPF_MUL, R2, R7),
   4329			BPF_ALU64_REG(BPF_MUL, R2, R8),
   4330			BPF_ALU64_REG(BPF_MUL, R2, R9),
   4331			BPF_ALU64_IMM(BPF_MUL, R2, 10),
   4332			BPF_ALU64_IMM(BPF_RSH, R2, 32),
   4333			BPF_ALU64_REG(BPF_MOV, R0, R2),
   4334			BPF_EXIT_INSN(),
   4335		},
   4336		INTERNAL,
   4337		{ },
   4338		{ { 0, 0x35d97ef2 } }
   4339	},
   4340	{	/* Mainly checking JIT here. */
   4341		"MOV REG64",
   4342		.u.insns_int = {
   4343			BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
   4344			BPF_MOV64_REG(R1, R0),
   4345			BPF_MOV64_REG(R2, R1),
   4346			BPF_MOV64_REG(R3, R2),
   4347			BPF_MOV64_REG(R4, R3),
   4348			BPF_MOV64_REG(R5, R4),
   4349			BPF_MOV64_REG(R6, R5),
   4350			BPF_MOV64_REG(R7, R6),
   4351			BPF_MOV64_REG(R8, R7),
   4352			BPF_MOV64_REG(R9, R8),
   4353			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   4354			BPF_ALU64_IMM(BPF_MOV, R1, 0),
   4355			BPF_ALU64_IMM(BPF_MOV, R2, 0),
   4356			BPF_ALU64_IMM(BPF_MOV, R3, 0),
   4357			BPF_ALU64_IMM(BPF_MOV, R4, 0),
   4358			BPF_ALU64_IMM(BPF_MOV, R5, 0),
   4359			BPF_ALU64_IMM(BPF_MOV, R6, 0),
   4360			BPF_ALU64_IMM(BPF_MOV, R7, 0),
   4361			BPF_ALU64_IMM(BPF_MOV, R8, 0),
   4362			BPF_ALU64_IMM(BPF_MOV, R9, 0),
   4363			BPF_ALU64_REG(BPF_ADD, R0, R0),
   4364			BPF_ALU64_REG(BPF_ADD, R0, R1),
   4365			BPF_ALU64_REG(BPF_ADD, R0, R2),
   4366			BPF_ALU64_REG(BPF_ADD, R0, R3),
   4367			BPF_ALU64_REG(BPF_ADD, R0, R4),
   4368			BPF_ALU64_REG(BPF_ADD, R0, R5),
   4369			BPF_ALU64_REG(BPF_ADD, R0, R6),
   4370			BPF_ALU64_REG(BPF_ADD, R0, R7),
   4371			BPF_ALU64_REG(BPF_ADD, R0, R8),
   4372			BPF_ALU64_REG(BPF_ADD, R0, R9),
   4373			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
   4374			BPF_EXIT_INSN(),
   4375		},
   4376		INTERNAL,
   4377		{ },
   4378		{ { 0, 0xfefe } }
   4379	},
   4380	{	/* Mainly checking JIT here. */
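		/* The 32-bit MOVs must clear the upper halves of registers
		 * that previously held all-ones; any stale high bits would
		 * corrupt the 64-bit sum, so only the final 0xfefe survives.
		 */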
   4381		"MOV REG32",
   4382		.u.insns_int = {
   4383			BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
   4384			BPF_MOV64_REG(R1, R0),
   4385			BPF_MOV64_REG(R2, R1),
   4386			BPF_MOV64_REG(R3, R2),
   4387			BPF_MOV64_REG(R4, R3),
   4388			BPF_MOV64_REG(R5, R4),
   4389			BPF_MOV64_REG(R6, R5),
   4390			BPF_MOV64_REG(R7, R6),
   4391			BPF_MOV64_REG(R8, R7),
   4392			BPF_MOV64_REG(R9, R8),
   4393			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   4394			BPF_ALU32_IMM(BPF_MOV, R1, 0),
   4395			BPF_ALU32_IMM(BPF_MOV, R2, 0),
   4396			BPF_ALU32_IMM(BPF_MOV, R3, 0),
   4397			BPF_ALU32_IMM(BPF_MOV, R4, 0),
   4398			BPF_ALU32_IMM(BPF_MOV, R5, 0),
   4399			BPF_ALU32_IMM(BPF_MOV, R6, 0),
   4400			BPF_ALU32_IMM(BPF_MOV, R7, 0),
   4401			BPF_ALU32_IMM(BPF_MOV, R8, 0),
   4402			BPF_ALU32_IMM(BPF_MOV, R9, 0),
   4403			BPF_ALU64_REG(BPF_ADD, R0, R0),
   4404			BPF_ALU64_REG(BPF_ADD, R0, R1),
   4405			BPF_ALU64_REG(BPF_ADD, R0, R2),
   4406			BPF_ALU64_REG(BPF_ADD, R0, R3),
   4407			BPF_ALU64_REG(BPF_ADD, R0, R4),
   4408			BPF_ALU64_REG(BPF_ADD, R0, R5),
   4409			BPF_ALU64_REG(BPF_ADD, R0, R6),
   4410			BPF_ALU64_REG(BPF_ADD, R0, R7),
   4411			BPF_ALU64_REG(BPF_ADD, R0, R8),
   4412			BPF_ALU64_REG(BPF_ADD, R0, R9),
   4413			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
   4414			BPF_EXIT_INSN(),
   4415		},
   4416		INTERNAL,
   4417		{ },
   4418		{ { 0, 0xfefe } }
   4419	},
   4420	{	/* Mainly checking JIT here. */
   4421		"LD IMM64",
   4422		.u.insns_int = {
   4423			BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
   4424			BPF_MOV64_REG(R1, R0),
   4425			BPF_MOV64_REG(R2, R1),
   4426			BPF_MOV64_REG(R3, R2),
   4427			BPF_MOV64_REG(R4, R3),
   4428			BPF_MOV64_REG(R5, R4),
   4429			BPF_MOV64_REG(R6, R5),
   4430			BPF_MOV64_REG(R7, R6),
   4431			BPF_MOV64_REG(R8, R7),
   4432			BPF_MOV64_REG(R9, R8),
   4433			BPF_LD_IMM64(R0, 0x0LL),
   4434			BPF_LD_IMM64(R1, 0x0LL),
   4435			BPF_LD_IMM64(R2, 0x0LL),
   4436			BPF_LD_IMM64(R3, 0x0LL),
   4437			BPF_LD_IMM64(R4, 0x0LL),
   4438			BPF_LD_IMM64(R5, 0x0LL),
   4439			BPF_LD_IMM64(R6, 0x0LL),
   4440			BPF_LD_IMM64(R7, 0x0LL),
   4441			BPF_LD_IMM64(R8, 0x0LL),
   4442			BPF_LD_IMM64(R9, 0x0LL),
   4443			BPF_ALU64_REG(BPF_ADD, R0, R0),
   4444			BPF_ALU64_REG(BPF_ADD, R0, R1),
   4445			BPF_ALU64_REG(BPF_ADD, R0, R2),
   4446			BPF_ALU64_REG(BPF_ADD, R0, R3),
   4447			BPF_ALU64_REG(BPF_ADD, R0, R4),
   4448			BPF_ALU64_REG(BPF_ADD, R0, R5),
   4449			BPF_ALU64_REG(BPF_ADD, R0, R6),
   4450			BPF_ALU64_REG(BPF_ADD, R0, R7),
   4451			BPF_ALU64_REG(BPF_ADD, R0, R8),
   4452			BPF_ALU64_REG(BPF_ADD, R0, R9),
   4453			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
   4454			BPF_EXIT_INSN(),
   4455		},
   4456		INTERNAL,
   4457		{ },
   4458		{ { 0, 0xfefe } }
   4459	},
   4460	{
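		/* (11 - 1) / (2 XOR 3) = 10 and 10 % 3 = 1; both checks pass,
		 * so the final MOV returns -1.
		 */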
   4461		"INT: ALU MIX",
   4462		.u.insns_int = {
   4463			BPF_ALU64_IMM(BPF_MOV, R0, 11),
   4464			BPF_ALU64_IMM(BPF_ADD, R0, -1),
   4465			BPF_ALU64_IMM(BPF_MOV, R2, 2),
   4466			BPF_ALU64_IMM(BPF_XOR, R2, 3),
   4467			BPF_ALU64_REG(BPF_DIV, R0, R2),
   4468			BPF_JMP_IMM(BPF_JEQ, R0, 10, 1),
   4469			BPF_EXIT_INSN(),
   4470			BPF_ALU64_IMM(BPF_MOD, R0, 3),
   4471			BPF_JMP_IMM(BPF_JEQ, R0, 1, 1),
   4472			BPF_EXIT_INSN(),
   4473			BPF_ALU64_IMM(BPF_MOV, R0, -1),
   4474			BPF_EXIT_INSN(),
   4475		},
   4476		INTERNAL,
   4477		{ },
   4478		{ { 0, -1 } }
   4479	},
   4480	{
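		/* -1234 >> 1 as a 32-bit value is 0x7ffffd97; the later
		 * LSH-by-46 / ARSH-by-47 pair recreates the sign and yields
		 * -1234 / 2 = -617.  The remaining shifts check small
		 * register-sized shift amounts.
		 */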
   4481		"INT: shifts by register",
   4482		.u.insns_int = {
   4483			BPF_MOV64_IMM(R0, -1234),
   4484			BPF_MOV64_IMM(R1, 1),
   4485			BPF_ALU32_REG(BPF_RSH, R0, R1),
   4486			BPF_JMP_IMM(BPF_JEQ, R0, 0x7ffffd97, 1),
   4487			BPF_EXIT_INSN(),
   4488			BPF_MOV64_IMM(R2, 1),
   4489			BPF_ALU64_REG(BPF_LSH, R0, R2),
   4490			BPF_MOV32_IMM(R4, -1234),
   4491			BPF_JMP_REG(BPF_JEQ, R0, R4, 1),
   4492			BPF_EXIT_INSN(),
   4493			BPF_ALU64_IMM(BPF_AND, R4, 63),
    4494			BPF_ALU64_REG(BPF_LSH, R0, R4), /* R0 <<= 46 */
   4495			BPF_MOV64_IMM(R3, 47),
   4496			BPF_ALU64_REG(BPF_ARSH, R0, R3),
   4497			BPF_JMP_IMM(BPF_JEQ, R0, -617, 1),
   4498			BPF_EXIT_INSN(),
   4499			BPF_MOV64_IMM(R2, 1),
   4500			BPF_ALU64_REG(BPF_LSH, R4, R2), /* R4 = 46 << 1 */
   4501			BPF_JMP_IMM(BPF_JEQ, R4, 92, 1),
   4502			BPF_EXIT_INSN(),
   4503			BPF_MOV64_IMM(R4, 4),
   4504			BPF_ALU64_REG(BPF_LSH, R4, R4), /* R4 = 4 << 4 */
   4505			BPF_JMP_IMM(BPF_JEQ, R4, 64, 1),
   4506			BPF_EXIT_INSN(),
   4507			BPF_MOV64_IMM(R4, 5),
   4508			BPF_ALU32_REG(BPF_LSH, R4, R4), /* R4 = 5 << 5 */
   4509			BPF_JMP_IMM(BPF_JEQ, R4, 160, 1),
   4510			BPF_EXIT_INSN(),
   4511			BPF_MOV64_IMM(R0, -1),
   4512			BPF_EXIT_INSN(),
   4513		},
   4514		INTERNAL,
   4515		{ },
   4516		{ { 0, -1 } }
   4517	},
   4518#ifdef CONFIG_32BIT
   4519	{
   4520		"INT: 32-bit context pointer word order and zero-extension",
   4521		.u.insns_int = {
   4522			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   4523			BPF_JMP32_IMM(BPF_JEQ, R1, 0, 3),
   4524			BPF_ALU64_IMM(BPF_RSH, R1, 32),
   4525			BPF_JMP32_IMM(BPF_JNE, R1, 0, 1),
   4526			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   4527			BPF_EXIT_INSN(),
   4528		},
   4529		INTERNAL,
   4530		{ },
   4531		{ { 0, 1 } }
   4532	},
   4533#endif
   4534	{
   4535		"check: missing ret",
   4536		.u.insns = {
   4537			BPF_STMT(BPF_LD | BPF_IMM, 1),
   4538		},
   4539		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
   4540		{ },
   4541		{ },
   4542		.fill_helper = NULL,
   4543		.expected_errcode = -EINVAL,
   4544	},
   4545	{
   4546		"check: div_k_0",
   4547		.u.insns = {
   4548			BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0),
   4549			BPF_STMT(BPF_RET | BPF_K, 0)
   4550		},
   4551		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
   4552		{ },
   4553		{ },
   4554		.fill_helper = NULL,
   4555		.expected_errcode = -EINVAL,
   4556	},
   4557	{
   4558		"check: unknown insn",
   4559		.u.insns = {
   4560			/* seccomp insn, rejected in socket filter */
   4561			BPF_STMT(BPF_LDX | BPF_W | BPF_ABS, 0),
   4562			BPF_STMT(BPF_RET | BPF_K, 0)
   4563		},
   4564		CLASSIC | FLAG_EXPECTED_FAIL,
   4565		{ },
   4566		{ },
   4567		.fill_helper = NULL,
   4568		.expected_errcode = -EINVAL,
   4569	},
   4570	{
   4571		"check: out of range spill/fill",
   4572		.u.insns = {
   4573			BPF_STMT(BPF_STX, 16),
   4574			BPF_STMT(BPF_RET | BPF_K, 0)
   4575		},
   4576		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
   4577		{ },
   4578		{ },
   4579		.fill_helper = NULL,
   4580		.expected_errcode = -EINVAL,
   4581	},
   4582	{
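		/* Long runs of loads separated by conditional jumps whose
		 * offsets skip across them, stressing jump offset handling in
		 * the JIT.  For this packet every executed load reads offset 0,
		 * so RET A returns the first halfword (0x001b).
		 */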
   4583		"JUMPS + HOLES",
   4584		.u.insns = {
   4585			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4586			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 15),
   4587			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4588			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4589			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4590			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4591			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4592			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4593			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4594			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4595			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4596			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4597			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4598			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4599			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4600			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 3, 4),
   4601			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4602			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 1, 2),
   4603			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4604			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15),
   4605			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14),
   4606			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4607			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4608			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4609			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4610			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4611			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4612			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4613			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4614			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4615			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4616			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4617			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4618			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4619			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 2, 3),
   4620			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 1, 2),
   4621			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4622			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15),
   4623			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14),
   4624			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4625			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4626			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4627			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4628			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4629			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4630			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4631			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4632			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4633			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4634			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4635			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4636			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4637			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 2, 3),
   4638			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 1, 2),
   4639			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
   4640			BPF_STMT(BPF_RET | BPF_A, 0),
   4641			BPF_STMT(BPF_RET | BPF_A, 0),
   4642		},
   4643		CLASSIC,
   4644		{ 0x00, 0x1b, 0x21, 0x3c, 0x9d, 0xf8,
   4645		  0x90, 0xe2, 0xba, 0x0a, 0x56, 0xb4,
   4646		  0x08, 0x00,
   4647		  0x45, 0x00, 0x00, 0x28, 0x00, 0x00,
   4648		  0x20, 0x00, 0x40, 0x11, 0x00, 0x00, /* IP header */
   4649		  0xc0, 0xa8, 0x33, 0x01,
   4650		  0xc0, 0xa8, 0x33, 0x02,
   4651		  0xbb, 0xb6,
   4652		  0xa9, 0xfa,
   4653		  0x00, 0x14, 0x00, 0x00,
   4654		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
   4655		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
   4656		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
   4657		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
   4658		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
   4659		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
   4660		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
   4661		  0xcc, 0xcc, 0xcc, 0xcc },
   4662		{ { 88, 0x001b } }
   4663	},
   4664	{
   4665		"check: RET X",
   4666		.u.insns = {
   4667			BPF_STMT(BPF_RET | BPF_X, 0),
   4668		},
   4669		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
   4670		{ },
   4671		{ },
   4672		.fill_helper = NULL,
   4673		.expected_errcode = -EINVAL,
   4674	},
   4675	{
   4676		"check: LDX + RET X",
   4677		.u.insns = {
   4678			BPF_STMT(BPF_LDX | BPF_IMM, 42),
   4679			BPF_STMT(BPF_RET | BPF_X, 0),
   4680		},
   4681		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
   4682		{ },
   4683		{ },
   4684		.fill_helper = NULL,
   4685		.expected_errcode = -EINVAL,
   4686	},
   4687	{	/* Mainly checking JIT here. */
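		/* Bounces a value through all 16 scratch slots via alternating
		 * STX/LDX, adding 1 on each round trip: 100 + 16 = 116.
		 */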
   4688		"M[]: alt STX + LDX",
   4689		.u.insns = {
   4690			BPF_STMT(BPF_LDX | BPF_IMM, 100),
   4691			BPF_STMT(BPF_STX, 0),
   4692			BPF_STMT(BPF_LDX | BPF_MEM, 0),
   4693			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   4694			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
   4695			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   4696			BPF_STMT(BPF_STX, 1),
   4697			BPF_STMT(BPF_LDX | BPF_MEM, 1),
   4698			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   4699			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
   4700			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   4701			BPF_STMT(BPF_STX, 2),
   4702			BPF_STMT(BPF_LDX | BPF_MEM, 2),
   4703			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   4704			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
   4705			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   4706			BPF_STMT(BPF_STX, 3),
   4707			BPF_STMT(BPF_LDX | BPF_MEM, 3),
   4708			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   4709			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
   4710			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   4711			BPF_STMT(BPF_STX, 4),
   4712			BPF_STMT(BPF_LDX | BPF_MEM, 4),
   4713			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   4714			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
   4715			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   4716			BPF_STMT(BPF_STX, 5),
   4717			BPF_STMT(BPF_LDX | BPF_MEM, 5),
   4718			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   4719			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
   4720			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   4721			BPF_STMT(BPF_STX, 6),
   4722			BPF_STMT(BPF_LDX | BPF_MEM, 6),
   4723			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   4724			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
   4725			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   4726			BPF_STMT(BPF_STX, 7),
   4727			BPF_STMT(BPF_LDX | BPF_MEM, 7),
   4728			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   4729			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
   4730			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   4731			BPF_STMT(BPF_STX, 8),
   4732			BPF_STMT(BPF_LDX | BPF_MEM, 8),
   4733			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   4734			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
   4735			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   4736			BPF_STMT(BPF_STX, 9),
   4737			BPF_STMT(BPF_LDX | BPF_MEM, 9),
   4738			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   4739			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
   4740			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   4741			BPF_STMT(BPF_STX, 10),
   4742			BPF_STMT(BPF_LDX | BPF_MEM, 10),
   4743			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   4744			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
   4745			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   4746			BPF_STMT(BPF_STX, 11),
   4747			BPF_STMT(BPF_LDX | BPF_MEM, 11),
   4748			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   4749			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
   4750			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   4751			BPF_STMT(BPF_STX, 12),
   4752			BPF_STMT(BPF_LDX | BPF_MEM, 12),
   4753			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   4754			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
   4755			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   4756			BPF_STMT(BPF_STX, 13),
   4757			BPF_STMT(BPF_LDX | BPF_MEM, 13),
   4758			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   4759			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
   4760			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   4761			BPF_STMT(BPF_STX, 14),
   4762			BPF_STMT(BPF_LDX | BPF_MEM, 14),
   4763			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   4764			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
   4765			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   4766			BPF_STMT(BPF_STX, 15),
   4767			BPF_STMT(BPF_LDX | BPF_MEM, 15),
   4768			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   4769			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
   4770			BPF_STMT(BPF_MISC | BPF_TAX, 0),
   4771			BPF_STMT(BPF_RET | BPF_A, 0),
   4772		},
   4773		CLASSIC | FLAG_NO_DATA,
   4774		{ },
   4775		{ { 0, 116 } },
   4776	},
   4777	{	/* Mainly checking JIT here. */
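		/* Fills all 16 scratch slots M[0]..M[15] with distinct
		 * constants and then sums them back through A; the expected
		 * value is the 32-bit truncated sum.
		 */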
   4778		"M[]: full STX + full LDX",
   4779		.u.insns = {
   4780			BPF_STMT(BPF_LDX | BPF_IMM, 0xbadfeedb),
   4781			BPF_STMT(BPF_STX, 0),
   4782			BPF_STMT(BPF_LDX | BPF_IMM, 0xecabedae),
   4783			BPF_STMT(BPF_STX, 1),
   4784			BPF_STMT(BPF_LDX | BPF_IMM, 0xafccfeaf),
   4785			BPF_STMT(BPF_STX, 2),
   4786			BPF_STMT(BPF_LDX | BPF_IMM, 0xbffdcedc),
   4787			BPF_STMT(BPF_STX, 3),
   4788			BPF_STMT(BPF_LDX | BPF_IMM, 0xfbbbdccb),
   4789			BPF_STMT(BPF_STX, 4),
   4790			BPF_STMT(BPF_LDX | BPF_IMM, 0xfbabcbda),
   4791			BPF_STMT(BPF_STX, 5),
   4792			BPF_STMT(BPF_LDX | BPF_IMM, 0xaedecbdb),
   4793			BPF_STMT(BPF_STX, 6),
   4794			BPF_STMT(BPF_LDX | BPF_IMM, 0xadebbade),
   4795			BPF_STMT(BPF_STX, 7),
   4796			BPF_STMT(BPF_LDX | BPF_IMM, 0xfcfcfaec),
   4797			BPF_STMT(BPF_STX, 8),
   4798			BPF_STMT(BPF_LDX | BPF_IMM, 0xbcdddbdc),
   4799			BPF_STMT(BPF_STX, 9),
   4800			BPF_STMT(BPF_LDX | BPF_IMM, 0xfeefdfac),
   4801			BPF_STMT(BPF_STX, 10),
   4802			BPF_STMT(BPF_LDX | BPF_IMM, 0xcddcdeea),
   4803			BPF_STMT(BPF_STX, 11),
   4804			BPF_STMT(BPF_LDX | BPF_IMM, 0xaccfaebb),
   4805			BPF_STMT(BPF_STX, 12),
   4806			BPF_STMT(BPF_LDX | BPF_IMM, 0xbdcccdcf),
   4807			BPF_STMT(BPF_STX, 13),
   4808			BPF_STMT(BPF_LDX | BPF_IMM, 0xaaedecde),
   4809			BPF_STMT(BPF_STX, 14),
   4810			BPF_STMT(BPF_LDX | BPF_IMM, 0xfaeacdad),
   4811			BPF_STMT(BPF_STX, 15),
   4812			BPF_STMT(BPF_LDX | BPF_MEM, 0),
   4813			BPF_STMT(BPF_MISC | BPF_TXA, 0),
   4814			BPF_STMT(BPF_LDX | BPF_MEM, 1),
   4815			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   4816			BPF_STMT(BPF_LDX | BPF_MEM, 2),
   4817			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   4818			BPF_STMT(BPF_LDX | BPF_MEM, 3),
   4819			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   4820			BPF_STMT(BPF_LDX | BPF_MEM, 4),
   4821			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   4822			BPF_STMT(BPF_LDX | BPF_MEM, 5),
   4823			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   4824			BPF_STMT(BPF_LDX | BPF_MEM, 6),
   4825			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   4826			BPF_STMT(BPF_LDX | BPF_MEM, 7),
   4827			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   4828			BPF_STMT(BPF_LDX | BPF_MEM, 8),
   4829			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   4830			BPF_STMT(BPF_LDX | BPF_MEM, 9),
   4831			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   4832			BPF_STMT(BPF_LDX | BPF_MEM, 10),
   4833			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   4834			BPF_STMT(BPF_LDX | BPF_MEM, 11),
   4835			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   4836			BPF_STMT(BPF_LDX | BPF_MEM, 12),
   4837			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   4838			BPF_STMT(BPF_LDX | BPF_MEM, 13),
   4839			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   4840			BPF_STMT(BPF_LDX | BPF_MEM, 14),
   4841			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   4842			BPF_STMT(BPF_LDX | BPF_MEM, 15),
   4843			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
   4844			BPF_STMT(BPF_RET | BPF_A, 0),
   4845		},
   4846		CLASSIC | FLAG_NO_DATA,
   4847		{ },
   4848		{ { 0, 0x2a5a5e5 } },
   4849	},
   4850	{
   4851		"check: SKF_AD_MAX",
   4852		.u.insns = {
   4853			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   4854				 SKF_AD_OFF + SKF_AD_MAX),
   4855			BPF_STMT(BPF_RET | BPF_A, 0),
   4856		},
   4857		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
   4858		{ },
   4859		{ },
   4860		.fill_helper = NULL,
   4861		.expected_errcode = -EINVAL,
   4862	},
   4863	{	/* Passes checker but fails during runtime. */
   4864		"LD [SKF_AD_OFF-1]",
   4865		.u.insns = {
   4866			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
   4867				 SKF_AD_OFF - 1),
   4868			BPF_STMT(BPF_RET | BPF_K, 1),
   4869		},
   4870		CLASSIC,
   4871		{ },
   4872		{ { 1, 0 } },
   4873	},
   4874	{
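		/* Splits the 64-bit immediate 0x567800001234 into its high and
		 * low words and checks both; on success R0 = 0x1ffffffff >> 32
		 * = 1.
		 */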
   4875		"load 64-bit immediate",
   4876		.u.insns_int = {
   4877			BPF_LD_IMM64(R1, 0x567800001234LL),
   4878			BPF_MOV64_REG(R2, R1),
   4879			BPF_MOV64_REG(R3, R2),
   4880			BPF_ALU64_IMM(BPF_RSH, R2, 32),
   4881			BPF_ALU64_IMM(BPF_LSH, R3, 32),
   4882			BPF_ALU64_IMM(BPF_RSH, R3, 32),
   4883			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   4884			BPF_JMP_IMM(BPF_JEQ, R2, 0x5678, 1),
   4885			BPF_EXIT_INSN(),
   4886			BPF_JMP_IMM(BPF_JEQ, R3, 0x1234, 1),
   4887			BPF_EXIT_INSN(),
   4888			BPF_LD_IMM64(R0, 0x1ffffffffLL),
   4889			BPF_ALU64_IMM(BPF_RSH, R0, 32), /* R0 = 1 */
   4890			BPF_EXIT_INSN(),
   4891		},
   4892		INTERNAL,
   4893		{ },
   4894		{ { 0, 1 } }
   4895	},
   4896	/* BPF_ALU | BPF_MOV | BPF_X */
   4897	{
   4898		"ALU_MOV_X: dst = 2",
   4899		.u.insns_int = {
   4900			BPF_ALU32_IMM(BPF_MOV, R1, 2),
   4901			BPF_ALU32_REG(BPF_MOV, R0, R1),
   4902			BPF_EXIT_INSN(),
   4903		},
   4904		INTERNAL,
   4905		{ },
   4906		{ { 0, 2 } },
   4907	},
   4908	{
   4909		"ALU_MOV_X: dst = 4294967295",
   4910		.u.insns_int = {
   4911			BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
   4912			BPF_ALU32_REG(BPF_MOV, R0, R1),
   4913			BPF_EXIT_INSN(),
   4914		},
   4915		INTERNAL,
   4916		{ },
   4917		{ { 0, 4294967295U } },
   4918	},
   4919	{
   4920		"ALU64_MOV_X: dst = 2",
   4921		.u.insns_int = {
   4922			BPF_ALU32_IMM(BPF_MOV, R1, 2),
   4923			BPF_ALU64_REG(BPF_MOV, R0, R1),
   4924			BPF_EXIT_INSN(),
   4925		},
   4926		INTERNAL,
   4927		{ },
   4928		{ { 0, 2 } },
   4929	},
   4930	{
   4931		"ALU64_MOV_X: dst = 4294967295",
   4932		.u.insns_int = {
   4933			BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
   4934			BPF_ALU64_REG(BPF_MOV, R0, R1),
   4935			BPF_EXIT_INSN(),
   4936		},
   4937		INTERNAL,
   4938		{ },
   4939		{ { 0, 4294967295U } },
   4940	},
   4941	/* BPF_ALU | BPF_MOV | BPF_K */
   4942	{
   4943		"ALU_MOV_K: dst = 2",
   4944		.u.insns_int = {
   4945			BPF_ALU32_IMM(BPF_MOV, R0, 2),
   4946			BPF_EXIT_INSN(),
   4947		},
   4948		INTERNAL,
   4949		{ },
   4950		{ { 0, 2 } },
   4951	},
   4952	{
   4953		"ALU_MOV_K: dst = 4294967295",
   4954		.u.insns_int = {
   4955			BPF_ALU32_IMM(BPF_MOV, R0, 4294967295U),
   4956			BPF_EXIT_INSN(),
   4957		},
   4958		INTERNAL,
   4959		{ },
   4960		{ { 0, 4294967295U } },
   4961	},
   4962	{
   4963		"ALU_MOV_K: 0x0000ffffffff0000 = 0x00000000ffffffff",
   4964		.u.insns_int = {
   4965			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
   4966			BPF_LD_IMM64(R3, 0x00000000ffffffffLL),
   4967			BPF_ALU32_IMM(BPF_MOV, R2, 0xffffffff),
   4968			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   4969			BPF_MOV32_IMM(R0, 2),
   4970			BPF_EXIT_INSN(),
   4971			BPF_MOV32_IMM(R0, 1),
   4972			BPF_EXIT_INSN(),
   4973		},
   4974		INTERNAL,
   4975		{ },
   4976		{ { 0, 0x1 } },
   4977	},
   4978	{
   4979		"ALU_MOV_K: small negative",
   4980		.u.insns_int = {
   4981			BPF_ALU32_IMM(BPF_MOV, R0, -123),
   4982			BPF_EXIT_INSN(),
   4983		},
   4984		INTERNAL,
   4985		{ },
   4986		{ { 0, -123 } }
   4987	},
   4988	{
   4989		"ALU_MOV_K: small negative zero extension",
   4990		.u.insns_int = {
   4991			BPF_ALU32_IMM(BPF_MOV, R0, -123),
   4992			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   4993			BPF_EXIT_INSN(),
   4994		},
   4995		INTERNAL,
   4996		{ },
   4997		{ { 0, 0 } }
   4998	},
   4999	{
   5000		"ALU_MOV_K: large negative",
   5001		.u.insns_int = {
   5002			BPF_ALU32_IMM(BPF_MOV, R0, -123456789),
   5003			BPF_EXIT_INSN(),
   5004		},
   5005		INTERNAL,
   5006		{ },
   5007		{ { 0, -123456789 } }
   5008	},
   5009	{
   5010		"ALU_MOV_K: large negative zero extension",
   5011		.u.insns_int = {
   5012			BPF_ALU32_IMM(BPF_MOV, R0, -123456789),
   5013			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   5014			BPF_EXIT_INSN(),
   5015		},
   5016		INTERNAL,
   5017		{ },
   5018		{ { 0, 0 } }
   5019	},
   5020	{
   5021		"ALU64_MOV_K: dst = 2",
   5022		.u.insns_int = {
   5023			BPF_ALU64_IMM(BPF_MOV, R0, 2),
   5024			BPF_EXIT_INSN(),
   5025		},
   5026		INTERNAL,
   5027		{ },
   5028		{ { 0, 2 } },
   5029	},
   5030	{
   5031		"ALU64_MOV_K: dst = 2147483647",
   5032		.u.insns_int = {
   5033			BPF_ALU64_IMM(BPF_MOV, R0, 2147483647),
   5034			BPF_EXIT_INSN(),
   5035		},
   5036		INTERNAL,
   5037		{ },
   5038		{ { 0, 2147483647 } },
   5039	},
   5040	{
   5041		"ALU64_OR_K: dst = 0x0",
   5042		.u.insns_int = {
   5043			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
   5044			BPF_LD_IMM64(R3, 0x0),
   5045			BPF_ALU64_IMM(BPF_MOV, R2, 0x0),
   5046			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   5047			BPF_MOV32_IMM(R0, 2),
   5048			BPF_EXIT_INSN(),
   5049			BPF_MOV32_IMM(R0, 1),
   5050			BPF_EXIT_INSN(),
   5051		},
   5052		INTERNAL,
   5053		{ },
   5054		{ { 0, 0x1 } },
   5055	},
   5056	{
   5057		"ALU64_MOV_K: dst = -1",
   5058		.u.insns_int = {
   5059			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
   5060			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
   5061			BPF_ALU64_IMM(BPF_MOV, R2, 0xffffffff),
   5062			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   5063			BPF_MOV32_IMM(R0, 2),
   5064			BPF_EXIT_INSN(),
   5065			BPF_MOV32_IMM(R0, 1),
   5066			BPF_EXIT_INSN(),
   5067		},
   5068		INTERNAL,
   5069		{ },
   5070		{ { 0, 0x1 } },
   5071	},
   5072	{
   5073		"ALU64_MOV_K: small negative",
   5074		.u.insns_int = {
   5075			BPF_ALU64_IMM(BPF_MOV, R0, -123),
   5076			BPF_EXIT_INSN(),
   5077		},
   5078		INTERNAL,
   5079		{ },
   5080		{ { 0, -123 } }
   5081	},
   5082	{
   5083		"ALU64_MOV_K: small negative sign extension",
   5084		.u.insns_int = {
   5085			BPF_ALU64_IMM(BPF_MOV, R0, -123),
   5086			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   5087			BPF_EXIT_INSN(),
   5088		},
   5089		INTERNAL,
   5090		{ },
   5091		{ { 0, 0xffffffff } }
   5092	},
   5093	{
   5094		"ALU64_MOV_K: large negative",
   5095		.u.insns_int = {
   5096			BPF_ALU64_IMM(BPF_MOV, R0, -123456789),
   5097			BPF_EXIT_INSN(),
   5098		},
   5099		INTERNAL,
   5100		{ },
   5101		{ { 0, -123456789 } }
   5102	},
   5103	{
   5104		"ALU64_MOV_K: large negative sign extension",
   5105		.u.insns_int = {
   5106			BPF_ALU64_IMM(BPF_MOV, R0, -123456789),
   5107			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   5108			BPF_EXIT_INSN(),
   5109		},
   5110		INTERNAL,
   5111		{ },
   5112		{ { 0, 0xffffffff } }
   5113	},
   5114	/* BPF_ALU | BPF_ADD | BPF_X */
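	/*
	 * The 32-bit (BPF_ALU) and 64-bit (BPF_ALU64) cases below are best
	 * read as pairs: the 32-bit form operates on the low word and
	 * zero-extends the result, so 2 + 4294967294 wraps to 0, while the
	 * 64-bit form uses the full register width and gives 4294967296.
	 */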
   5115	{
   5116		"ALU_ADD_X: 1 + 2 = 3",
   5117		.u.insns_int = {
   5118			BPF_LD_IMM64(R0, 1),
   5119			BPF_ALU32_IMM(BPF_MOV, R1, 2),
   5120			BPF_ALU32_REG(BPF_ADD, R0, R1),
   5121			BPF_EXIT_INSN(),
   5122		},
   5123		INTERNAL,
   5124		{ },
   5125		{ { 0, 3 } },
   5126	},
   5127	{
   5128		"ALU_ADD_X: 1 + 4294967294 = 4294967295",
   5129		.u.insns_int = {
   5130			BPF_LD_IMM64(R0, 1),
   5131			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
   5132			BPF_ALU32_REG(BPF_ADD, R0, R1),
   5133			BPF_EXIT_INSN(),
   5134		},
   5135		INTERNAL,
   5136		{ },
   5137		{ { 0, 4294967295U } },
   5138	},
   5139	{
   5140		"ALU_ADD_X: 2 + 4294967294 = 0",
   5141		.u.insns_int = {
   5142			BPF_LD_IMM64(R0, 2),
   5143			BPF_LD_IMM64(R1, 4294967294U),
   5144			BPF_ALU32_REG(BPF_ADD, R0, R1),
   5145			BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
   5146			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   5147			BPF_EXIT_INSN(),
   5148			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   5149			BPF_EXIT_INSN(),
   5150		},
   5151		INTERNAL,
   5152		{ },
   5153		{ { 0, 1 } },
   5154	},
   5155	{
   5156		"ALU64_ADD_X: 1 + 2 = 3",
   5157		.u.insns_int = {
   5158			BPF_LD_IMM64(R0, 1),
   5159			BPF_ALU32_IMM(BPF_MOV, R1, 2),
   5160			BPF_ALU64_REG(BPF_ADD, R0, R1),
   5161			BPF_EXIT_INSN(),
   5162		},
   5163		INTERNAL,
   5164		{ },
   5165		{ { 0, 3 } },
   5166	},
   5167	{
   5168		"ALU64_ADD_X: 1 + 4294967294 = 4294967295",
   5169		.u.insns_int = {
   5170			BPF_LD_IMM64(R0, 1),
   5171			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
   5172			BPF_ALU64_REG(BPF_ADD, R0, R1),
   5173			BPF_EXIT_INSN(),
   5174		},
   5175		INTERNAL,
   5176		{ },
   5177		{ { 0, 4294967295U } },
   5178	},
   5179	{
   5180		"ALU64_ADD_X: 2 + 4294967294 = 4294967296",
   5181		.u.insns_int = {
   5182			BPF_LD_IMM64(R0, 2),
   5183			BPF_LD_IMM64(R1, 4294967294U),
   5184			BPF_LD_IMM64(R2, 4294967296ULL),
   5185			BPF_ALU64_REG(BPF_ADD, R0, R1),
   5186			BPF_JMP_REG(BPF_JEQ, R0, R2, 2),
   5187			BPF_MOV32_IMM(R0, 0),
   5188			BPF_EXIT_INSN(),
   5189			BPF_MOV32_IMM(R0, 1),
   5190			BPF_EXIT_INSN(),
   5191		},
   5192		INTERNAL,
   5193		{ },
   5194		{ { 0, 1 } },
   5195	},
   5196	/* BPF_ALU | BPF_ADD | BPF_K */
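	/*
	 * In the immediate (BPF_K) forms the 32-bit imm field is
	 * sign-extended to 64 bits by BPF_ALU64 instructions; hence
	 * 0 + 0x80000000 is expected to give 0xffffffff80000000 in the
	 * ALU64 cases further below, but plain 0x80000000 in the 32-bit
	 * ones.
	 */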
   5197	{
   5198		"ALU_ADD_K: 1 + 2 = 3",
   5199		.u.insns_int = {
   5200			BPF_LD_IMM64(R0, 1),
   5201			BPF_ALU32_IMM(BPF_ADD, R0, 2),
   5202			BPF_EXIT_INSN(),
   5203		},
   5204		INTERNAL,
   5205		{ },
   5206		{ { 0, 3 } },
   5207	},
   5208	{
   5209		"ALU_ADD_K: 3 + 0 = 3",
   5210		.u.insns_int = {
   5211			BPF_LD_IMM64(R0, 3),
   5212			BPF_ALU32_IMM(BPF_ADD, R0, 0),
   5213			BPF_EXIT_INSN(),
   5214		},
   5215		INTERNAL,
   5216		{ },
   5217		{ { 0, 3 } },
   5218	},
   5219	{
   5220		"ALU_ADD_K: 1 + 4294967294 = 4294967295",
   5221		.u.insns_int = {
   5222			BPF_LD_IMM64(R0, 1),
   5223			BPF_ALU32_IMM(BPF_ADD, R0, 4294967294U),
   5224			BPF_EXIT_INSN(),
   5225		},
   5226		INTERNAL,
   5227		{ },
   5228		{ { 0, 4294967295U } },
   5229	},
   5230	{
   5231		"ALU_ADD_K: 4294967294 + 2 = 0",
   5232		.u.insns_int = {
   5233			BPF_LD_IMM64(R0, 4294967294U),
   5234			BPF_ALU32_IMM(BPF_ADD, R0, 2),
   5235			BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
   5236			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   5237			BPF_EXIT_INSN(),
   5238			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   5239			BPF_EXIT_INSN(),
   5240		},
   5241		INTERNAL,
   5242		{ },
   5243		{ { 0, 1 } },
   5244	},
   5245	{
   5246		"ALU_ADD_K: 0 + (-1) = 0x00000000ffffffff",
   5247		.u.insns_int = {
   5248			BPF_LD_IMM64(R2, 0x0),
   5249			BPF_LD_IMM64(R3, 0x00000000ffffffff),
   5250			BPF_ALU32_IMM(BPF_ADD, R2, 0xffffffff),
   5251			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   5252			BPF_MOV32_IMM(R0, 2),
   5253			BPF_EXIT_INSN(),
   5254			BPF_MOV32_IMM(R0, 1),
   5255			BPF_EXIT_INSN(),
   5256		},
   5257		INTERNAL,
   5258		{ },
   5259		{ { 0, 0x1 } },
   5260	},
   5261	{
   5262		"ALU_ADD_K: 0 + 0xffff = 0xffff",
   5263		.u.insns_int = {
   5264			BPF_LD_IMM64(R2, 0x0),
   5265			BPF_LD_IMM64(R3, 0xffff),
   5266			BPF_ALU32_IMM(BPF_ADD, R2, 0xffff),
   5267			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   5268			BPF_MOV32_IMM(R0, 2),
   5269			BPF_EXIT_INSN(),
   5270			BPF_MOV32_IMM(R0, 1),
   5271			BPF_EXIT_INSN(),
   5272		},
   5273		INTERNAL,
   5274		{ },
   5275		{ { 0, 0x1 } },
   5276	},
   5277	{
   5278		"ALU_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
   5279		.u.insns_int = {
   5280			BPF_LD_IMM64(R2, 0x0),
   5281			BPF_LD_IMM64(R3, 0x7fffffff),
   5282			BPF_ALU32_IMM(BPF_ADD, R2, 0x7fffffff),
   5283			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   5284			BPF_MOV32_IMM(R0, 2),
   5285			BPF_EXIT_INSN(),
   5286			BPF_MOV32_IMM(R0, 1),
   5287			BPF_EXIT_INSN(),
   5288		},
   5289		INTERNAL,
   5290		{ },
   5291		{ { 0, 0x1 } },
   5292	},
   5293	{
   5294		"ALU_ADD_K: 0 + 0x80000000 = 0x80000000",
   5295		.u.insns_int = {
   5296			BPF_LD_IMM64(R2, 0x0),
   5297			BPF_LD_IMM64(R3, 0x80000000),
   5298			BPF_ALU32_IMM(BPF_ADD, R2, 0x80000000),
   5299			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   5300			BPF_MOV32_IMM(R0, 2),
   5301			BPF_EXIT_INSN(),
   5302			BPF_MOV32_IMM(R0, 1),
   5303			BPF_EXIT_INSN(),
   5304		},
   5305		INTERNAL,
   5306		{ },
   5307		{ { 0, 0x1 } },
   5308	},
   5309	{
   5310		"ALU_ADD_K: 0 + 0x80008000 = 0x80008000",
   5311		.u.insns_int = {
   5312			BPF_LD_IMM64(R2, 0x0),
   5313			BPF_LD_IMM64(R3, 0x80008000),
   5314			BPF_ALU32_IMM(BPF_ADD, R2, 0x80008000),
   5315			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   5316			BPF_MOV32_IMM(R0, 2),
   5317			BPF_EXIT_INSN(),
   5318			BPF_MOV32_IMM(R0, 1),
   5319			BPF_EXIT_INSN(),
   5320		},
   5321		INTERNAL,
   5322		{ },
   5323		{ { 0, 0x1 } },
   5324	},
   5325	{
   5326		"ALU64_ADD_K: 1 + 2 = 3",
   5327		.u.insns_int = {
   5328			BPF_LD_IMM64(R0, 1),
   5329			BPF_ALU64_IMM(BPF_ADD, R0, 2),
   5330			BPF_EXIT_INSN(),
   5331		},
   5332		INTERNAL,
   5333		{ },
   5334		{ { 0, 3 } },
   5335	},
   5336	{
   5337		"ALU64_ADD_K: 3 + 0 = 3",
   5338		.u.insns_int = {
   5339			BPF_LD_IMM64(R0, 3),
   5340			BPF_ALU64_IMM(BPF_ADD, R0, 0),
   5341			BPF_EXIT_INSN(),
   5342		},
   5343		INTERNAL,
   5344		{ },
   5345		{ { 0, 3 } },
   5346	},
   5347	{
   5348		"ALU64_ADD_K: 1 + 2147483646 = 2147483647",
   5349		.u.insns_int = {
   5350			BPF_LD_IMM64(R0, 1),
   5351			BPF_ALU64_IMM(BPF_ADD, R0, 2147483646),
   5352			BPF_EXIT_INSN(),
   5353		},
   5354		INTERNAL,
   5355		{ },
   5356		{ { 0, 2147483647 } },
   5357	},
   5358	{
   5359		"ALU64_ADD_K: 4294967294 + 2 = 4294967296",
   5360		.u.insns_int = {
   5361			BPF_LD_IMM64(R0, 4294967294U),
   5362			BPF_LD_IMM64(R1, 4294967296ULL),
   5363			BPF_ALU64_IMM(BPF_ADD, R0, 2),
   5364			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
   5365			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   5366			BPF_EXIT_INSN(),
   5367			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   5368			BPF_EXIT_INSN(),
   5369		},
   5370		INTERNAL,
   5371		{ },
   5372		{ { 0, 1 } },
   5373	},
   5374	{
   5375		"ALU64_ADD_K: 2147483646 + -2147483647 = -1",
   5376		.u.insns_int = {
   5377			BPF_LD_IMM64(R0, 2147483646),
   5378			BPF_ALU64_IMM(BPF_ADD, R0, -2147483647),
   5379			BPF_EXIT_INSN(),
   5380		},
   5381		INTERNAL,
   5382		{ },
   5383		{ { 0, -1 } },
   5384	},
   5385	{
   5386		"ALU64_ADD_K: 1 + 0 = 1",
   5387		.u.insns_int = {
   5388			BPF_LD_IMM64(R2, 0x1),
   5389			BPF_LD_IMM64(R3, 0x1),
   5390			BPF_ALU64_IMM(BPF_ADD, R2, 0x0),
   5391			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   5392			BPF_MOV32_IMM(R0, 2),
   5393			BPF_EXIT_INSN(),
   5394			BPF_MOV32_IMM(R0, 1),
   5395			BPF_EXIT_INSN(),
   5396		},
   5397		INTERNAL,
   5398		{ },
   5399		{ { 0, 0x1 } },
   5400	},
   5401	{
   5402		"ALU64_ADD_K: 0 + (-1) = 0xffffffffffffffff",
   5403		.u.insns_int = {
   5404			BPF_LD_IMM64(R2, 0x0),
   5405			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
   5406			BPF_ALU64_IMM(BPF_ADD, R2, 0xffffffff),
   5407			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   5408			BPF_MOV32_IMM(R0, 2),
   5409			BPF_EXIT_INSN(),
   5410			BPF_MOV32_IMM(R0, 1),
   5411			BPF_EXIT_INSN(),
   5412		},
   5413		INTERNAL,
   5414		{ },
   5415		{ { 0, 0x1 } },
   5416	},
   5417	{
   5418		"ALU64_ADD_K: 0 + 0xffff = 0xffff",
   5419		.u.insns_int = {
   5420			BPF_LD_IMM64(R2, 0x0),
   5421			BPF_LD_IMM64(R3, 0xffff),
   5422			BPF_ALU64_IMM(BPF_ADD, R2, 0xffff),
   5423			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   5424			BPF_MOV32_IMM(R0, 2),
   5425			BPF_EXIT_INSN(),
   5426			BPF_MOV32_IMM(R0, 1),
   5427			BPF_EXIT_INSN(),
   5428		},
   5429		INTERNAL,
   5430		{ },
   5431		{ { 0, 0x1 } },
   5432	},
   5433	{
   5434		"ALU64_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
   5435		.u.insns_int = {
   5436			BPF_LD_IMM64(R2, 0x0),
   5437			BPF_LD_IMM64(R3, 0x7fffffff),
   5438			BPF_ALU64_IMM(BPF_ADD, R2, 0x7fffffff),
   5439			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   5440			BPF_MOV32_IMM(R0, 2),
   5441			BPF_EXIT_INSN(),
   5442			BPF_MOV32_IMM(R0, 1),
   5443			BPF_EXIT_INSN(),
   5444		},
   5445		INTERNAL,
   5446		{ },
   5447		{ { 0, 0x1 } },
   5448	},
   5449	{
   5450		"ALU64_ADD_K: 0 + 0x80000000 = 0xffffffff80000000",
   5451		.u.insns_int = {
   5452			BPF_LD_IMM64(R2, 0x0),
   5453			BPF_LD_IMM64(R3, 0xffffffff80000000LL),
   5454			BPF_ALU64_IMM(BPF_ADD, R2, 0x80000000),
   5455			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   5456			BPF_MOV32_IMM(R0, 2),
   5457			BPF_EXIT_INSN(),
   5458			BPF_MOV32_IMM(R0, 1),
   5459			BPF_EXIT_INSN(),
   5460		},
   5461		INTERNAL,
   5462		{ },
   5463		{ { 0, 0x1 } },
   5464	},
   5465	{
    5466		"ALU64_ADD_K: 0 + 0x80008000 = 0xffffffff80008000",
   5467		.u.insns_int = {
   5468			BPF_LD_IMM64(R2, 0x0),
   5469			BPF_LD_IMM64(R3, 0xffffffff80008000LL),
   5470			BPF_ALU64_IMM(BPF_ADD, R2, 0x80008000),
   5471			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   5472			BPF_MOV32_IMM(R0, 2),
   5473			BPF_EXIT_INSN(),
   5474			BPF_MOV32_IMM(R0, 1),
   5475			BPF_EXIT_INSN(),
   5476		},
   5477		INTERNAL,
   5478		{ },
   5479		{ { 0, 0x1 } },
   5480	},
   5481	/* BPF_ALU | BPF_SUB | BPF_X */
   5482	{
   5483		"ALU_SUB_X: 3 - 1 = 2",
   5484		.u.insns_int = {
   5485			BPF_LD_IMM64(R0, 3),
   5486			BPF_ALU32_IMM(BPF_MOV, R1, 1),
   5487			BPF_ALU32_REG(BPF_SUB, R0, R1),
   5488			BPF_EXIT_INSN(),
   5489		},
   5490		INTERNAL,
   5491		{ },
   5492		{ { 0, 2 } },
   5493	},
   5494	{
   5495		"ALU_SUB_X: 4294967295 - 4294967294 = 1",
   5496		.u.insns_int = {
   5497			BPF_LD_IMM64(R0, 4294967295U),
   5498			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
   5499			BPF_ALU32_REG(BPF_SUB, R0, R1),
   5500			BPF_EXIT_INSN(),
   5501		},
   5502		INTERNAL,
   5503		{ },
   5504		{ { 0, 1 } },
   5505	},
   5506	{
   5507		"ALU64_SUB_X: 3 - 1 = 2",
   5508		.u.insns_int = {
   5509			BPF_LD_IMM64(R0, 3),
   5510			BPF_ALU32_IMM(BPF_MOV, R1, 1),
   5511			BPF_ALU64_REG(BPF_SUB, R0, R1),
   5512			BPF_EXIT_INSN(),
   5513		},
   5514		INTERNAL,
   5515		{ },
   5516		{ { 0, 2 } },
   5517	},
   5518	{
   5519		"ALU64_SUB_X: 4294967295 - 4294967294 = 1",
   5520		.u.insns_int = {
   5521			BPF_LD_IMM64(R0, 4294967295U),
   5522			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
   5523			BPF_ALU64_REG(BPF_SUB, R0, R1),
   5524			BPF_EXIT_INSN(),
   5525		},
   5526		INTERNAL,
   5527		{ },
   5528		{ { 0, 1 } },
   5529	},
   5530	/* BPF_ALU | BPF_SUB | BPF_K */
   5531	{
   5532		"ALU_SUB_K: 3 - 1 = 2",
   5533		.u.insns_int = {
   5534			BPF_LD_IMM64(R0, 3),
   5535			BPF_ALU32_IMM(BPF_SUB, R0, 1),
   5536			BPF_EXIT_INSN(),
   5537		},
   5538		INTERNAL,
   5539		{ },
   5540		{ { 0, 2 } },
   5541	},
   5542	{
   5543		"ALU_SUB_K: 3 - 0 = 3",
   5544		.u.insns_int = {
   5545			BPF_LD_IMM64(R0, 3),
   5546			BPF_ALU32_IMM(BPF_SUB, R0, 0),
   5547			BPF_EXIT_INSN(),
   5548		},
   5549		INTERNAL,
   5550		{ },
   5551		{ { 0, 3 } },
   5552	},
   5553	{
   5554		"ALU_SUB_K: 4294967295 - 4294967294 = 1",
   5555		.u.insns_int = {
   5556			BPF_LD_IMM64(R0, 4294967295U),
   5557			BPF_ALU32_IMM(BPF_SUB, R0, 4294967294U),
   5558			BPF_EXIT_INSN(),
   5559		},
   5560		INTERNAL,
   5561		{ },
   5562		{ { 0, 1 } },
   5563	},
   5564	{
   5565		"ALU64_SUB_K: 3 - 1 = 2",
   5566		.u.insns_int = {
   5567			BPF_LD_IMM64(R0, 3),
   5568			BPF_ALU64_IMM(BPF_SUB, R0, 1),
   5569			BPF_EXIT_INSN(),
   5570		},
   5571		INTERNAL,
   5572		{ },
   5573		{ { 0, 2 } },
   5574	},
   5575	{
   5576		"ALU64_SUB_K: 3 - 0 = 3",
   5577		.u.insns_int = {
   5578			BPF_LD_IMM64(R0, 3),
   5579			BPF_ALU64_IMM(BPF_SUB, R0, 0),
   5580			BPF_EXIT_INSN(),
   5581		},
   5582		INTERNAL,
   5583		{ },
   5584		{ { 0, 3 } },
   5585	},
   5586	{
   5587		"ALU64_SUB_K: 4294967294 - 4294967295 = -1",
   5588		.u.insns_int = {
   5589			BPF_LD_IMM64(R0, 4294967294U),
   5590			BPF_ALU64_IMM(BPF_SUB, R0, 4294967295U),
   5591			BPF_EXIT_INSN(),
   5592		},
   5593		INTERNAL,
   5594		{ },
   5595		{ { 0, -1 } },
   5596	},
   5597	{
    5598		"ALU64_SUB_K: 2147483646 - 2147483647 = -1",
   5599		.u.insns_int = {
   5600			BPF_LD_IMM64(R0, 2147483646),
   5601			BPF_ALU64_IMM(BPF_SUB, R0, 2147483647),
   5602			BPF_EXIT_INSN(),
   5603		},
   5604		INTERNAL,
   5605		{ },
   5606		{ { 0, -1 } },
   5607	},
   5608	/* BPF_ALU | BPF_MUL | BPF_X */
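	/*
	 * The 64x64 "low word" / "high word" multiply pairs use identical
	 * operands and differ only in the trailing 32-bit right shift, so
	 * between them they check the full 64-bit product.
	 */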
   5609	{
   5610		"ALU_MUL_X: 2 * 3 = 6",
   5611		.u.insns_int = {
   5612			BPF_LD_IMM64(R0, 2),
   5613			BPF_ALU32_IMM(BPF_MOV, R1, 3),
   5614			BPF_ALU32_REG(BPF_MUL, R0, R1),
   5615			BPF_EXIT_INSN(),
   5616		},
   5617		INTERNAL,
   5618		{ },
   5619		{ { 0, 6 } },
   5620	},
   5621	{
   5622		"ALU_MUL_X: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
   5623		.u.insns_int = {
   5624			BPF_LD_IMM64(R0, 2),
   5625			BPF_ALU32_IMM(BPF_MOV, R1, 0x7FFFFFF8),
   5626			BPF_ALU32_REG(BPF_MUL, R0, R1),
   5627			BPF_EXIT_INSN(),
   5628		},
   5629		INTERNAL,
   5630		{ },
   5631		{ { 0, 0xFFFFFFF0 } },
   5632	},
   5633	{
   5634		"ALU_MUL_X: -1 * -1 = 1",
   5635		.u.insns_int = {
   5636			BPF_LD_IMM64(R0, -1),
   5637			BPF_ALU32_IMM(BPF_MOV, R1, -1),
   5638			BPF_ALU32_REG(BPF_MUL, R0, R1),
   5639			BPF_EXIT_INSN(),
   5640		},
   5641		INTERNAL,
   5642		{ },
   5643		{ { 0, 1 } },
   5644	},
   5645	{
   5646		"ALU64_MUL_X: 2 * 3 = 6",
   5647		.u.insns_int = {
   5648			BPF_LD_IMM64(R0, 2),
   5649			BPF_ALU32_IMM(BPF_MOV, R1, 3),
   5650			BPF_ALU64_REG(BPF_MUL, R0, R1),
   5651			BPF_EXIT_INSN(),
   5652		},
   5653		INTERNAL,
   5654		{ },
   5655		{ { 0, 6 } },
   5656	},
   5657	{
   5658		"ALU64_MUL_X: 1 * 2147483647 = 2147483647",
   5659		.u.insns_int = {
   5660			BPF_LD_IMM64(R0, 1),
   5661			BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
   5662			BPF_ALU64_REG(BPF_MUL, R0, R1),
   5663			BPF_EXIT_INSN(),
   5664		},
   5665		INTERNAL,
   5666		{ },
   5667		{ { 0, 2147483647 } },
   5668	},
   5669	{
   5670		"ALU64_MUL_X: 64x64 multiply, low word",
   5671		.u.insns_int = {
   5672			BPF_LD_IMM64(R0, 0x0fedcba987654321LL),
   5673			BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
   5674			BPF_ALU64_REG(BPF_MUL, R0, R1),
   5675			BPF_EXIT_INSN(),
   5676		},
   5677		INTERNAL,
   5678		{ },
   5679		{ { 0, 0xe5618cf0 } }
   5680	},
   5681	{
   5682		"ALU64_MUL_X: 64x64 multiply, high word",
   5683		.u.insns_int = {
   5684			BPF_LD_IMM64(R0, 0x0fedcba987654321LL),
   5685			BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
   5686			BPF_ALU64_REG(BPF_MUL, R0, R1),
   5687			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   5688			BPF_EXIT_INSN(),
   5689		},
   5690		INTERNAL,
   5691		{ },
   5692		{ { 0, 0x2236d88f } }
   5693	},
   5694	/* BPF_ALU | BPF_MUL | BPF_K */
   5695	{
   5696		"ALU_MUL_K: 2 * 3 = 6",
   5697		.u.insns_int = {
   5698			BPF_LD_IMM64(R0, 2),
   5699			BPF_ALU32_IMM(BPF_MUL, R0, 3),
   5700			BPF_EXIT_INSN(),
   5701		},
   5702		INTERNAL,
   5703		{ },
   5704		{ { 0, 6 } },
   5705	},
   5706	{
   5707		"ALU_MUL_K: 3 * 1 = 3",
   5708		.u.insns_int = {
   5709			BPF_LD_IMM64(R0, 3),
   5710			BPF_ALU32_IMM(BPF_MUL, R0, 1),
   5711			BPF_EXIT_INSN(),
   5712		},
   5713		INTERNAL,
   5714		{ },
   5715		{ { 0, 3 } },
   5716	},
   5717	{
   5718		"ALU_MUL_K: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
   5719		.u.insns_int = {
   5720			BPF_LD_IMM64(R0, 2),
   5721			BPF_ALU32_IMM(BPF_MUL, R0, 0x7FFFFFF8),
   5722			BPF_EXIT_INSN(),
   5723		},
   5724		INTERNAL,
   5725		{ },
   5726		{ { 0, 0xFFFFFFF0 } },
   5727	},
   5728	{
   5729		"ALU_MUL_K: 1 * (-1) = 0x00000000ffffffff",
   5730		.u.insns_int = {
   5731			BPF_LD_IMM64(R2, 0x1),
   5732			BPF_LD_IMM64(R3, 0x00000000ffffffff),
   5733			BPF_ALU32_IMM(BPF_MUL, R2, 0xffffffff),
   5734			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   5735			BPF_MOV32_IMM(R0, 2),
   5736			BPF_EXIT_INSN(),
   5737			BPF_MOV32_IMM(R0, 1),
   5738			BPF_EXIT_INSN(),
   5739		},
   5740		INTERNAL,
   5741		{ },
   5742		{ { 0, 0x1 } },
   5743	},
   5744	{
   5745		"ALU64_MUL_K: 2 * 3 = 6",
   5746		.u.insns_int = {
   5747			BPF_LD_IMM64(R0, 2),
   5748			BPF_ALU64_IMM(BPF_MUL, R0, 3),
   5749			BPF_EXIT_INSN(),
   5750		},
   5751		INTERNAL,
   5752		{ },
   5753		{ { 0, 6 } },
   5754	},
   5755	{
   5756		"ALU64_MUL_K: 3 * 1 = 3",
   5757		.u.insns_int = {
   5758			BPF_LD_IMM64(R0, 3),
   5759			BPF_ALU64_IMM(BPF_MUL, R0, 1),
   5760			BPF_EXIT_INSN(),
   5761		},
   5762		INTERNAL,
   5763		{ },
   5764		{ { 0, 3 } },
   5765	},
   5766	{
   5767		"ALU64_MUL_K: 1 * 2147483647 = 2147483647",
   5768		.u.insns_int = {
   5769			BPF_LD_IMM64(R0, 1),
   5770			BPF_ALU64_IMM(BPF_MUL, R0, 2147483647),
   5771			BPF_EXIT_INSN(),
   5772		},
   5773		INTERNAL,
   5774		{ },
   5775		{ { 0, 2147483647 } },
   5776	},
   5777	{
   5778		"ALU64_MUL_K: 1 * -2147483647 = -2147483647",
   5779		.u.insns_int = {
   5780			BPF_LD_IMM64(R0, 1),
   5781			BPF_ALU64_IMM(BPF_MUL, R0, -2147483647),
   5782			BPF_EXIT_INSN(),
   5783		},
   5784		INTERNAL,
   5785		{ },
   5786		{ { 0, -2147483647 } },
   5787	},
   5788	{
   5789		"ALU64_MUL_K: 1 * (-1) = 0xffffffffffffffff",
   5790		.u.insns_int = {
   5791			BPF_LD_IMM64(R2, 0x1),
   5792			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
   5793			BPF_ALU64_IMM(BPF_MUL, R2, 0xffffffff),
   5794			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   5795			BPF_MOV32_IMM(R0, 2),
   5796			BPF_EXIT_INSN(),
   5797			BPF_MOV32_IMM(R0, 1),
   5798			BPF_EXIT_INSN(),
   5799		},
   5800		INTERNAL,
   5801		{ },
   5802		{ { 0, 0x1 } },
   5803	},
   5804	{
   5805		"ALU64_MUL_K: 64x32 multiply, low word",
   5806		.u.insns_int = {
   5807			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   5808			BPF_ALU64_IMM(BPF_MUL, R0, 0x12345678),
   5809			BPF_EXIT_INSN(),
   5810		},
   5811		INTERNAL,
   5812		{ },
   5813		{ { 0, 0xe242d208 } }
   5814	},
   5815	{
   5816		"ALU64_MUL_K: 64x32 multiply, high word",
   5817		.u.insns_int = {
   5818			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   5819			BPF_ALU64_IMM(BPF_MUL, R0, 0x12345678),
   5820			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   5821			BPF_EXIT_INSN(),
   5822		},
   5823		INTERNAL,
   5824		{ },
   5825		{ { 0, 0xc28f5c28 } }
   5826	},
   5827	/* BPF_ALU | BPF_DIV | BPF_X */
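	/*
	 * BPF_DIV (and BPF_MOD below) are unsigned operations, so an
	 * all-ones operand is treated as 0xffffffffffffffff rather than -1;
	 * the "(-1)" cases accordingly expect a quotient of 1.
	 */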
   5828	{
   5829		"ALU_DIV_X: 6 / 2 = 3",
   5830		.u.insns_int = {
   5831			BPF_LD_IMM64(R0, 6),
   5832			BPF_ALU32_IMM(BPF_MOV, R1, 2),
   5833			BPF_ALU32_REG(BPF_DIV, R0, R1),
   5834			BPF_EXIT_INSN(),
   5835		},
   5836		INTERNAL,
   5837		{ },
   5838		{ { 0, 3 } },
   5839	},
   5840	{
   5841		"ALU_DIV_X: 4294967295 / 4294967295 = 1",
   5842		.u.insns_int = {
   5843			BPF_LD_IMM64(R0, 4294967295U),
   5844			BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
   5845			BPF_ALU32_REG(BPF_DIV, R0, R1),
   5846			BPF_EXIT_INSN(),
   5847		},
   5848		INTERNAL,
   5849		{ },
   5850		{ { 0, 1 } },
   5851	},
   5852	{
   5853		"ALU64_DIV_X: 6 / 2 = 3",
   5854		.u.insns_int = {
   5855			BPF_LD_IMM64(R0, 6),
   5856			BPF_ALU32_IMM(BPF_MOV, R1, 2),
   5857			BPF_ALU64_REG(BPF_DIV, R0, R1),
   5858			BPF_EXIT_INSN(),
   5859		},
   5860		INTERNAL,
   5861		{ },
   5862		{ { 0, 3 } },
   5863	},
   5864	{
   5865		"ALU64_DIV_X: 2147483647 / 2147483647 = 1",
   5866		.u.insns_int = {
   5867			BPF_LD_IMM64(R0, 2147483647),
   5868			BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
   5869			BPF_ALU64_REG(BPF_DIV, R0, R1),
   5870			BPF_EXIT_INSN(),
   5871		},
   5872		INTERNAL,
   5873		{ },
   5874		{ { 0, 1 } },
   5875	},
   5876	{
   5877		"ALU64_DIV_X: 0xffffffffffffffff / (-1) = 0x0000000000000001",
   5878		.u.insns_int = {
   5879			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
   5880			BPF_LD_IMM64(R4, 0xffffffffffffffffLL),
   5881			BPF_LD_IMM64(R3, 0x0000000000000001LL),
   5882			BPF_ALU64_REG(BPF_DIV, R2, R4),
   5883			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   5884			BPF_MOV32_IMM(R0, 2),
   5885			BPF_EXIT_INSN(),
   5886			BPF_MOV32_IMM(R0, 1),
   5887			BPF_EXIT_INSN(),
   5888		},
   5889		INTERNAL,
   5890		{ },
   5891		{ { 0, 0x1 } },
   5892	},
   5893	/* BPF_ALU | BPF_DIV | BPF_K */
   5894	{
   5895		"ALU_DIV_K: 6 / 2 = 3",
   5896		.u.insns_int = {
   5897			BPF_LD_IMM64(R0, 6),
   5898			BPF_ALU32_IMM(BPF_DIV, R0, 2),
   5899			BPF_EXIT_INSN(),
   5900		},
   5901		INTERNAL,
   5902		{ },
   5903		{ { 0, 3 } },
   5904	},
   5905	{
   5906		"ALU_DIV_K: 3 / 1 = 3",
   5907		.u.insns_int = {
   5908			BPF_LD_IMM64(R0, 3),
   5909			BPF_ALU32_IMM(BPF_DIV, R0, 1),
   5910			BPF_EXIT_INSN(),
   5911		},
   5912		INTERNAL,
   5913		{ },
   5914		{ { 0, 3 } },
   5915	},
   5916	{
   5917		"ALU_DIV_K: 4294967295 / 4294967295 = 1",
   5918		.u.insns_int = {
   5919			BPF_LD_IMM64(R0, 4294967295U),
   5920			BPF_ALU32_IMM(BPF_DIV, R0, 4294967295U),
   5921			BPF_EXIT_INSN(),
   5922		},
   5923		INTERNAL,
   5924		{ },
   5925		{ { 0, 1 } },
   5926	},
   5927	{
   5928		"ALU_DIV_K: 0xffffffffffffffff / (-1) = 0x1",
   5929		.u.insns_int = {
   5930			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
   5931			BPF_LD_IMM64(R3, 0x1UL),
   5932			BPF_ALU32_IMM(BPF_DIV, R2, 0xffffffff),
   5933			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   5934			BPF_MOV32_IMM(R0, 2),
   5935			BPF_EXIT_INSN(),
   5936			BPF_MOV32_IMM(R0, 1),
   5937			BPF_EXIT_INSN(),
   5938		},
   5939		INTERNAL,
   5940		{ },
   5941		{ { 0, 0x1 } },
   5942	},
   5943	{
   5944		"ALU64_DIV_K: 6 / 2 = 3",
   5945		.u.insns_int = {
   5946			BPF_LD_IMM64(R0, 6),
   5947			BPF_ALU64_IMM(BPF_DIV, R0, 2),
   5948			BPF_EXIT_INSN(),
   5949		},
   5950		INTERNAL,
   5951		{ },
   5952		{ { 0, 3 } },
   5953	},
   5954	{
   5955		"ALU64_DIV_K: 3 / 1 = 3",
   5956		.u.insns_int = {
   5957			BPF_LD_IMM64(R0, 3),
   5958			BPF_ALU64_IMM(BPF_DIV, R0, 1),
   5959			BPF_EXIT_INSN(),
   5960		},
   5961		INTERNAL,
   5962		{ },
   5963		{ { 0, 3 } },
   5964	},
   5965	{
   5966		"ALU64_DIV_K: 2147483647 / 2147483647 = 1",
   5967		.u.insns_int = {
   5968			BPF_LD_IMM64(R0, 2147483647),
   5969			BPF_ALU64_IMM(BPF_DIV, R0, 2147483647),
   5970			BPF_EXIT_INSN(),
   5971		},
   5972		INTERNAL,
   5973		{ },
   5974		{ { 0, 1 } },
   5975	},
   5976	{
   5977		"ALU64_DIV_K: 0xffffffffffffffff / (-1) = 0x0000000000000001",
   5978		.u.insns_int = {
   5979			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
   5980			BPF_LD_IMM64(R3, 0x0000000000000001LL),
   5981			BPF_ALU64_IMM(BPF_DIV, R2, 0xffffffff),
   5982			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   5983			BPF_MOV32_IMM(R0, 2),
   5984			BPF_EXIT_INSN(),
   5985			BPF_MOV32_IMM(R0, 1),
   5986			BPF_EXIT_INSN(),
   5987		},
   5988		INTERNAL,
   5989		{ },
   5990		{ { 0, 0x1 } },
   5991	},
   5992	/* BPF_ALU | BPF_MOD | BPF_X */
   5993	{
   5994		"ALU_MOD_X: 3 % 2 = 1",
   5995		.u.insns_int = {
   5996			BPF_LD_IMM64(R0, 3),
   5997			BPF_ALU32_IMM(BPF_MOV, R1, 2),
   5998			BPF_ALU32_REG(BPF_MOD, R0, R1),
   5999			BPF_EXIT_INSN(),
   6000		},
   6001		INTERNAL,
   6002		{ },
   6003		{ { 0, 1 } },
   6004	},
   6005	{
   6006		"ALU_MOD_X: 4294967295 % 4294967293 = 2",
   6007		.u.insns_int = {
   6008			BPF_LD_IMM64(R0, 4294967295U),
   6009			BPF_ALU32_IMM(BPF_MOV, R1, 4294967293U),
   6010			BPF_ALU32_REG(BPF_MOD, R0, R1),
   6011			BPF_EXIT_INSN(),
   6012		},
   6013		INTERNAL,
   6014		{ },
   6015		{ { 0, 2 } },
   6016	},
   6017	{
   6018		"ALU64_MOD_X: 3 % 2 = 1",
   6019		.u.insns_int = {
   6020			BPF_LD_IMM64(R0, 3),
   6021			BPF_ALU32_IMM(BPF_MOV, R1, 2),
   6022			BPF_ALU64_REG(BPF_MOD, R0, R1),
   6023			BPF_EXIT_INSN(),
   6024		},
   6025		INTERNAL,
   6026		{ },
   6027		{ { 0, 1 } },
   6028	},
   6029	{
   6030		"ALU64_MOD_X: 2147483647 % 2147483645 = 2",
   6031		.u.insns_int = {
   6032			BPF_LD_IMM64(R0, 2147483647),
   6033			BPF_ALU32_IMM(BPF_MOV, R1, 2147483645),
   6034			BPF_ALU64_REG(BPF_MOD, R0, R1),
   6035			BPF_EXIT_INSN(),
   6036		},
   6037		INTERNAL,
   6038		{ },
   6039		{ { 0, 2 } },
   6040	},
   6041	/* BPF_ALU | BPF_MOD | BPF_K */
   6042	{
   6043		"ALU_MOD_K: 3 % 2 = 1",
   6044		.u.insns_int = {
   6045			BPF_LD_IMM64(R0, 3),
   6046			BPF_ALU32_IMM(BPF_MOD, R0, 2),
   6047			BPF_EXIT_INSN(),
   6048		},
   6049		INTERNAL,
   6050		{ },
   6051		{ { 0, 1 } },
   6052	},
   6053	{
   6054		"ALU_MOD_K: 3 % 1 = 0",
   6055		.u.insns_int = {
   6056			BPF_LD_IMM64(R0, 3),
   6057			BPF_ALU32_IMM(BPF_MOD, R0, 1),
   6058			BPF_EXIT_INSN(),
   6059		},
   6060		INTERNAL,
   6061		{ },
   6062		{ { 0, 0 } },
   6063	},
   6064	{
   6065		"ALU_MOD_K: 4294967295 % 4294967293 = 2",
   6066		.u.insns_int = {
   6067			BPF_LD_IMM64(R0, 4294967295U),
   6068			BPF_ALU32_IMM(BPF_MOD, R0, 4294967293U),
   6069			BPF_EXIT_INSN(),
   6070		},
   6071		INTERNAL,
   6072		{ },
   6073		{ { 0, 2 } },
   6074	},
   6075	{
   6076		"ALU64_MOD_K: 3 % 2 = 1",
   6077		.u.insns_int = {
   6078			BPF_LD_IMM64(R0, 3),
   6079			BPF_ALU64_IMM(BPF_MOD, R0, 2),
   6080			BPF_EXIT_INSN(),
   6081		},
   6082		INTERNAL,
   6083		{ },
   6084		{ { 0, 1 } },
   6085	},
   6086	{
   6087		"ALU64_MOD_K: 3 % 1 = 0",
   6088		.u.insns_int = {
   6089			BPF_LD_IMM64(R0, 3),
   6090			BPF_ALU64_IMM(BPF_MOD, R0, 1),
   6091			BPF_EXIT_INSN(),
   6092		},
   6093		INTERNAL,
   6094		{ },
   6095		{ { 0, 0 } },
   6096	},
   6097	{
   6098		"ALU64_MOD_K: 2147483647 % 2147483645 = 2",
   6099		.u.insns_int = {
   6100			BPF_LD_IMM64(R0, 2147483647),
   6101			BPF_ALU64_IMM(BPF_MOD, R0, 2147483645),
   6102			BPF_EXIT_INSN(),
   6103		},
   6104		INTERNAL,
   6105		{ },
   6106		{ { 0, 2 } },
   6107	},
   6108	/* BPF_ALU | BPF_AND | BPF_X */
   6109	{
   6110		"ALU_AND_X: 3 & 2 = 2",
   6111		.u.insns_int = {
   6112			BPF_LD_IMM64(R0, 3),
   6113			BPF_ALU32_IMM(BPF_MOV, R1, 2),
   6114			BPF_ALU32_REG(BPF_AND, R0, R1),
   6115			BPF_EXIT_INSN(),
   6116		},
   6117		INTERNAL,
   6118		{ },
   6119		{ { 0, 2 } },
   6120	},
   6121	{
   6122		"ALU_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
   6123		.u.insns_int = {
   6124			BPF_LD_IMM64(R0, 0xffffffff),
   6125			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
   6126			BPF_ALU32_REG(BPF_AND, R0, R1),
   6127			BPF_EXIT_INSN(),
   6128		},
   6129		INTERNAL,
   6130		{ },
   6131		{ { 0, 0xffffffff } },
   6132	},
   6133	{
   6134		"ALU64_AND_X: 3 & 2 = 2",
   6135		.u.insns_int = {
   6136			BPF_LD_IMM64(R0, 3),
   6137			BPF_ALU32_IMM(BPF_MOV, R1, 2),
   6138			BPF_ALU64_REG(BPF_AND, R0, R1),
   6139			BPF_EXIT_INSN(),
   6140		},
   6141		INTERNAL,
   6142		{ },
   6143		{ { 0, 2 } },
   6144	},
   6145	{
   6146		"ALU64_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
   6147		.u.insns_int = {
   6148			BPF_LD_IMM64(R0, 0xffffffff),
   6149			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
   6150			BPF_ALU64_REG(BPF_AND, R0, R1),
   6151			BPF_EXIT_INSN(),
   6152		},
   6153		INTERNAL,
   6154		{ },
   6155		{ { 0, 0xffffffff } },
   6156	},
   6157	/* BPF_ALU | BPF_AND | BPF_K */
   6158	{
   6159		"ALU_AND_K: 3 & 2 = 2",
   6160		.u.insns_int = {
   6161			BPF_LD_IMM64(R0, 3),
   6162			BPF_ALU32_IMM(BPF_AND, R0, 2),
   6163			BPF_EXIT_INSN(),
   6164		},
   6165		INTERNAL,
   6166		{ },
   6167		{ { 0, 2 } },
   6168	},
   6169	{
   6170		"ALU_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
   6171		.u.insns_int = {
   6172			BPF_LD_IMM64(R0, 0xffffffff),
   6173			BPF_ALU32_IMM(BPF_AND, R0, 0xffffffff),
   6174			BPF_EXIT_INSN(),
   6175		},
   6176		INTERNAL,
   6177		{ },
   6178		{ { 0, 0xffffffff } },
   6179	},
   6180	{
   6181		"ALU_AND_K: Small immediate",
   6182		.u.insns_int = {
   6183			BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
   6184			BPF_ALU32_IMM(BPF_AND, R0, 15),
   6185			BPF_EXIT_INSN(),
   6186		},
   6187		INTERNAL,
   6188		{ },
   6189		{ { 0, 4 } }
   6190	},
   6191	{
   6192		"ALU_AND_K: Large immediate",
   6193		.u.insns_int = {
   6194			BPF_ALU32_IMM(BPF_MOV, R0, 0xf1f2f3f4),
   6195			BPF_ALU32_IMM(BPF_AND, R0, 0xafbfcfdf),
   6196			BPF_EXIT_INSN(),
   6197		},
   6198		INTERNAL,
   6199		{ },
   6200		{ { 0, 0xa1b2c3d4 } }
   6201	},
   6202	{
   6203		"ALU_AND_K: Zero extension",
   6204		.u.insns_int = {
   6205			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   6206			BPF_LD_IMM64(R1, 0x0000000080a0c0e0LL),
   6207			BPF_ALU32_IMM(BPF_AND, R0, 0xf0f0f0f0),
   6208			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
   6209			BPF_MOV32_IMM(R0, 2),
   6210			BPF_EXIT_INSN(),
   6211			BPF_MOV32_IMM(R0, 1),
   6212			BPF_EXIT_INSN(),
   6213		},
   6214		INTERNAL,
   6215		{ },
   6216		{ { 0, 1 } }
   6217	},
   6218	{
   6219		"ALU64_AND_K: 3 & 2 = 2",
   6220		.u.insns_int = {
   6221			BPF_LD_IMM64(R0, 3),
   6222			BPF_ALU64_IMM(BPF_AND, R0, 2),
   6223			BPF_EXIT_INSN(),
   6224		},
   6225		INTERNAL,
   6226		{ },
   6227		{ { 0, 2 } },
   6228	},
   6229	{
   6230		"ALU64_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
   6231		.u.insns_int = {
   6232			BPF_LD_IMM64(R0, 0xffffffff),
   6233			BPF_ALU64_IMM(BPF_AND, R0, 0xffffffff),
   6234			BPF_EXIT_INSN(),
   6235		},
   6236		INTERNAL,
   6237		{ },
   6238		{ { 0, 0xffffffff } },
   6239	},
   6240	{
   6241		"ALU64_AND_K: 0x0000ffffffff0000 & 0x0 = 0x0000000000000000",
   6242		.u.insns_int = {
   6243			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
   6244			BPF_LD_IMM64(R3, 0x0000000000000000LL),
   6245			BPF_ALU64_IMM(BPF_AND, R2, 0x0),
   6246			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   6247			BPF_MOV32_IMM(R0, 2),
   6248			BPF_EXIT_INSN(),
   6249			BPF_MOV32_IMM(R0, 1),
   6250			BPF_EXIT_INSN(),
   6251		},
   6252		INTERNAL,
   6253		{ },
   6254		{ { 0, 0x1 } },
   6255	},
   6256	{
   6257		"ALU64_AND_K: 0x0000ffffffff0000 & -1 = 0x0000ffffffff0000",
   6258		.u.insns_int = {
   6259			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
   6260			BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
   6261			BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
   6262			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   6263			BPF_MOV32_IMM(R0, 2),
   6264			BPF_EXIT_INSN(),
   6265			BPF_MOV32_IMM(R0, 1),
   6266			BPF_EXIT_INSN(),
   6267		},
   6268		INTERNAL,
   6269		{ },
   6270		{ { 0, 0x1 } },
   6271	},
   6272	{
   6273		"ALU64_AND_K: 0xffffffffffffffff & -1 = 0xffffffffffffffff",
   6274		.u.insns_int = {
   6275			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
   6276			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
   6277			BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
   6278			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   6279			BPF_MOV32_IMM(R0, 2),
   6280			BPF_EXIT_INSN(),
   6281			BPF_MOV32_IMM(R0, 1),
   6282			BPF_EXIT_INSN(),
   6283		},
   6284		INTERNAL,
   6285		{ },
   6286		{ { 0, 0x1 } },
   6287	},
   6288	{
   6289		"ALU64_AND_K: Sign extension 1",
   6290		.u.insns_int = {
   6291			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   6292			BPF_LD_IMM64(R1, 0x00000000090b0d0fLL),
   6293			BPF_ALU64_IMM(BPF_AND, R0, 0x0f0f0f0f),
   6294			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
   6295			BPF_MOV32_IMM(R0, 2),
   6296			BPF_EXIT_INSN(),
   6297			BPF_MOV32_IMM(R0, 1),
   6298			BPF_EXIT_INSN(),
   6299		},
   6300		INTERNAL,
   6301		{ },
   6302		{ { 0, 1 } }
   6303	},
   6304	{
   6305		"ALU64_AND_K: Sign extension 2",
   6306		.u.insns_int = {
   6307			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   6308			BPF_LD_IMM64(R1, 0x0123456780a0c0e0LL),
   6309			BPF_ALU64_IMM(BPF_AND, R0, 0xf0f0f0f0),
   6310			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
   6311			BPF_MOV32_IMM(R0, 2),
   6312			BPF_EXIT_INSN(),
   6313			BPF_MOV32_IMM(R0, 1),
   6314			BPF_EXIT_INSN(),
   6315		},
   6316		INTERNAL,
   6317		{ },
   6318		{ { 0, 1 } }
   6319	},
   6320	/* BPF_ALU | BPF_OR | BPF_X */
   6321	{
   6322		"ALU_OR_X: 1 | 2 = 3",
   6323		.u.insns_int = {
   6324			BPF_LD_IMM64(R0, 1),
   6325			BPF_ALU32_IMM(BPF_MOV, R1, 2),
   6326			BPF_ALU32_REG(BPF_OR, R0, R1),
   6327			BPF_EXIT_INSN(),
   6328		},
   6329		INTERNAL,
   6330		{ },
   6331		{ { 0, 3 } },
   6332	},
   6333	{
   6334		"ALU_OR_X: 0x0 | 0xffffffff = 0xffffffff",
   6335		.u.insns_int = {
   6336			BPF_LD_IMM64(R0, 0),
   6337			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
   6338			BPF_ALU32_REG(BPF_OR, R0, R1),
   6339			BPF_EXIT_INSN(),
   6340		},
   6341		INTERNAL,
   6342		{ },
   6343		{ { 0, 0xffffffff } },
   6344	},
   6345	{
   6346		"ALU64_OR_X: 1 | 2 = 3",
   6347		.u.insns_int = {
   6348			BPF_LD_IMM64(R0, 1),
   6349			BPF_ALU32_IMM(BPF_MOV, R1, 2),
   6350			BPF_ALU64_REG(BPF_OR, R0, R1),
   6351			BPF_EXIT_INSN(),
   6352		},
   6353		INTERNAL,
   6354		{ },
   6355		{ { 0, 3 } },
   6356	},
   6357	{
   6358		"ALU64_OR_X: 0 | 0xffffffff = 0xffffffff",
   6359		.u.insns_int = {
   6360			BPF_LD_IMM64(R0, 0),
   6361			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
   6362			BPF_ALU64_REG(BPF_OR, R0, R1),
   6363			BPF_EXIT_INSN(),
   6364		},
   6365		INTERNAL,
   6366		{ },
   6367		{ { 0, 0xffffffff } },
   6368	},
   6369	/* BPF_ALU | BPF_OR | BPF_K */
   6370	{
   6371		"ALU_OR_K: 1 | 2 = 3",
   6372		.u.insns_int = {
   6373			BPF_LD_IMM64(R0, 1),
   6374			BPF_ALU32_IMM(BPF_OR, R0, 2),
   6375			BPF_EXIT_INSN(),
   6376		},
   6377		INTERNAL,
   6378		{ },
   6379		{ { 0, 3 } },
   6380	},
   6381	{
    6382		"ALU_OR_K: 0 | 0xffffffff = 0xffffffff",
   6383		.u.insns_int = {
   6384			BPF_LD_IMM64(R0, 0),
   6385			BPF_ALU32_IMM(BPF_OR, R0, 0xffffffff),
   6386			BPF_EXIT_INSN(),
   6387		},
   6388		INTERNAL,
   6389		{ },
   6390		{ { 0, 0xffffffff } },
   6391	},
   6392	{
   6393		"ALU_OR_K: Small immediate",
   6394		.u.insns_int = {
   6395			BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
   6396			BPF_ALU32_IMM(BPF_OR, R0, 1),
   6397			BPF_EXIT_INSN(),
   6398		},
   6399		INTERNAL,
   6400		{ },
   6401		{ { 0, 0x01020305 } }
   6402	},
   6403	{
   6404		"ALU_OR_K: Large immediate",
   6405		.u.insns_int = {
   6406			BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
   6407			BPF_ALU32_IMM(BPF_OR, R0, 0xa0b0c0d0),
   6408			BPF_EXIT_INSN(),
   6409		},
   6410		INTERNAL,
   6411		{ },
   6412		{ { 0, 0xa1b2c3d4 } }
   6413	},
   6414	{
   6415		"ALU_OR_K: Zero extension",
   6416		.u.insns_int = {
   6417			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   6418			BPF_LD_IMM64(R1, 0x00000000f9fbfdffLL),
   6419			BPF_ALU32_IMM(BPF_OR, R0, 0xf0f0f0f0),
   6420			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
   6421			BPF_MOV32_IMM(R0, 2),
   6422			BPF_EXIT_INSN(),
   6423			BPF_MOV32_IMM(R0, 1),
   6424			BPF_EXIT_INSN(),
   6425		},
   6426		INTERNAL,
   6427		{ },
   6428		{ { 0, 1 } }
   6429	},
   6430	{
   6431		"ALU64_OR_K: 1 | 2 = 3",
   6432		.u.insns_int = {
   6433			BPF_LD_IMM64(R0, 1),
   6434			BPF_ALU64_IMM(BPF_OR, R0, 2),
   6435			BPF_EXIT_INSN(),
   6436		},
   6437		INTERNAL,
   6438		{ },
   6439		{ { 0, 3 } },
   6440	},
   6441	{
    6442		"ALU64_OR_K: 0 | 0xffffffff = 0xffffffff",
   6443		.u.insns_int = {
   6444			BPF_LD_IMM64(R0, 0),
   6445			BPF_ALU64_IMM(BPF_OR, R0, 0xffffffff),
   6446			BPF_EXIT_INSN(),
   6447		},
   6448		INTERNAL,
   6449		{ },
   6450		{ { 0, 0xffffffff } },
   6451	},
   6452	{
   6453		"ALU64_OR_K: 0x0000ffffffff0000 | 0x0 = 0x0000ffffffff0000",
   6454		.u.insns_int = {
   6455			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
   6456			BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
   6457			BPF_ALU64_IMM(BPF_OR, R2, 0x0),
   6458			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   6459			BPF_MOV32_IMM(R0, 2),
   6460			BPF_EXIT_INSN(),
   6461			BPF_MOV32_IMM(R0, 1),
   6462			BPF_EXIT_INSN(),
   6463		},
   6464		INTERNAL,
   6465		{ },
   6466		{ { 0, 0x1 } },
   6467	},
   6468	{
   6469		"ALU64_OR_K: 0x0000ffffffff0000 | -1 = 0xffffffffffffffff",
   6470		.u.insns_int = {
   6471			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
   6472			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
   6473			BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
   6474			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   6475			BPF_MOV32_IMM(R0, 2),
   6476			BPF_EXIT_INSN(),
   6477			BPF_MOV32_IMM(R0, 1),
   6478			BPF_EXIT_INSN(),
   6479		},
   6480		INTERNAL,
   6481		{ },
   6482		{ { 0, 0x1 } },
   6483	},
   6484	{
    6485		"ALU64_OR_K: 0x0000000000000000 | -1 = 0xffffffffffffffff",
   6486		.u.insns_int = {
   6487			BPF_LD_IMM64(R2, 0x0000000000000000LL),
   6488			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
   6489			BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
   6490			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   6491			BPF_MOV32_IMM(R0, 2),
   6492			BPF_EXIT_INSN(),
   6493			BPF_MOV32_IMM(R0, 1),
   6494			BPF_EXIT_INSN(),
   6495		},
   6496		INTERNAL,
   6497		{ },
   6498		{ { 0, 0x1 } },
   6499	},
   6500	{
   6501		"ALU64_OR_K: Sign extension 1",
   6502		.u.insns_int = {
   6503			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   6504			BPF_LD_IMM64(R1, 0x012345678fafcfefLL),
   6505			BPF_ALU64_IMM(BPF_OR, R0, 0x0f0f0f0f),
   6506			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
   6507			BPF_MOV32_IMM(R0, 2),
   6508			BPF_EXIT_INSN(),
   6509			BPF_MOV32_IMM(R0, 1),
   6510			BPF_EXIT_INSN(),
   6511		},
   6512		INTERNAL,
   6513		{ },
   6514		{ { 0, 1 } }
   6515	},
   6516	{
   6517		"ALU64_OR_K: Sign extension 2",
   6518		.u.insns_int = {
   6519			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   6520			BPF_LD_IMM64(R1, 0xfffffffff9fbfdffLL),
   6521			BPF_ALU64_IMM(BPF_OR, R0, 0xf0f0f0f0),
   6522			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
   6523			BPF_MOV32_IMM(R0, 2),
   6524			BPF_EXIT_INSN(),
   6525			BPF_MOV32_IMM(R0, 1),
   6526			BPF_EXIT_INSN(),
   6527		},
   6528		INTERNAL,
   6529		{ },
   6530		{ { 0, 1 } }
   6531	},
   6532	/* BPF_ALU | BPF_XOR | BPF_X */
   6533	{
   6534		"ALU_XOR_X: 5 ^ 6 = 3",
   6535		.u.insns_int = {
   6536			BPF_LD_IMM64(R0, 5),
   6537			BPF_ALU32_IMM(BPF_MOV, R1, 6),
   6538			BPF_ALU32_REG(BPF_XOR, R0, R1),
   6539			BPF_EXIT_INSN(),
   6540		},
   6541		INTERNAL,
   6542		{ },
   6543		{ { 0, 3 } },
   6544	},
   6545	{
   6546		"ALU_XOR_X: 0x1 ^ 0xffffffff = 0xfffffffe",
   6547		.u.insns_int = {
   6548			BPF_LD_IMM64(R0, 1),
   6549			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
   6550			BPF_ALU32_REG(BPF_XOR, R0, R1),
   6551			BPF_EXIT_INSN(),
   6552		},
   6553		INTERNAL,
   6554		{ },
   6555		{ { 0, 0xfffffffe } },
   6556	},
   6557	{
   6558		"ALU64_XOR_X: 5 ^ 6 = 3",
   6559		.u.insns_int = {
   6560			BPF_LD_IMM64(R0, 5),
   6561			BPF_ALU32_IMM(BPF_MOV, R1, 6),
   6562			BPF_ALU64_REG(BPF_XOR, R0, R1),
   6563			BPF_EXIT_INSN(),
   6564		},
   6565		INTERNAL,
   6566		{ },
   6567		{ { 0, 3 } },
   6568	},
   6569	{
   6570		"ALU64_XOR_X: 1 ^ 0xffffffff = 0xfffffffe",
   6571		.u.insns_int = {
   6572			BPF_LD_IMM64(R0, 1),
   6573			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
   6574			BPF_ALU64_REG(BPF_XOR, R0, R1),
   6575			BPF_EXIT_INSN(),
   6576		},
   6577		INTERNAL,
   6578		{ },
   6579		{ { 0, 0xfffffffe } },
   6580	},
   6581	/* BPF_ALU | BPF_XOR | BPF_K */
   6582	{
   6583		"ALU_XOR_K: 5 ^ 6 = 3",
   6584		.u.insns_int = {
   6585			BPF_LD_IMM64(R0, 5),
   6586			BPF_ALU32_IMM(BPF_XOR, R0, 6),
   6587			BPF_EXIT_INSN(),
   6588		},
   6589		INTERNAL,
   6590		{ },
   6591		{ { 0, 3 } },
   6592	},
   6593	{
   6594		"ALU_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
   6595		.u.insns_int = {
   6596			BPF_LD_IMM64(R0, 1),
   6597			BPF_ALU32_IMM(BPF_XOR, R0, 0xffffffff),
   6598			BPF_EXIT_INSN(),
   6599		},
   6600		INTERNAL,
   6601		{ },
   6602		{ { 0, 0xfffffffe } },
   6603	},
   6604	{
   6605		"ALU_XOR_K: Small immediate",
   6606		.u.insns_int = {
   6607			BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
   6608			BPF_ALU32_IMM(BPF_XOR, R0, 15),
   6609			BPF_EXIT_INSN(),
   6610		},
   6611		INTERNAL,
   6612		{ },
   6613		{ { 0, 0x0102030b } }
   6614	},
   6615	{
   6616		"ALU_XOR_K: Large immediate",
   6617		.u.insns_int = {
   6618			BPF_ALU32_IMM(BPF_MOV, R0, 0xf1f2f3f4),
   6619			BPF_ALU32_IMM(BPF_XOR, R0, 0xafbfcfdf),
   6620			BPF_EXIT_INSN(),
   6621		},
   6622		INTERNAL,
   6623		{ },
   6624		{ { 0, 0x5e4d3c2b } }
   6625	},
   6626	{
   6627		"ALU_XOR_K: Zero extension",
   6628		.u.insns_int = {
   6629			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   6630			BPF_LD_IMM64(R1, 0x00000000795b3d1fLL),
   6631			BPF_ALU32_IMM(BPF_XOR, R0, 0xf0f0f0f0),
   6632			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
   6633			BPF_MOV32_IMM(R0, 2),
   6634			BPF_EXIT_INSN(),
   6635			BPF_MOV32_IMM(R0, 1),
   6636			BPF_EXIT_INSN(),
   6637		},
   6638		INTERNAL,
   6639		{ },
   6640		{ { 0, 1 } }
   6641	},
   6642	{
   6643		"ALU64_XOR_K: 5 ^ 6 = 3",
   6644		.u.insns_int = {
   6645			BPF_LD_IMM64(R0, 5),
   6646			BPF_ALU64_IMM(BPF_XOR, R0, 6),
   6647			BPF_EXIT_INSN(),
   6648		},
   6649		INTERNAL,
   6650		{ },
   6651		{ { 0, 3 } },
   6652	},
   6653	{
   6654		"ALU64_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
   6655		.u.insns_int = {
   6656			BPF_LD_IMM64(R0, 1),
   6657			BPF_ALU64_IMM(BPF_XOR, R0, 0xffffffff),
   6658			BPF_EXIT_INSN(),
   6659		},
   6660		INTERNAL,
   6661		{ },
   6662		{ { 0, 0xfffffffe } },
   6663	},
   6664	{
   6665		"ALU64_XOR_K: 0x0000ffffffff0000 ^ 0x0 = 0x0000ffffffff0000",
   6666		.u.insns_int = {
   6667			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
   6668			BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
   6669			BPF_ALU64_IMM(BPF_XOR, R2, 0x0),
   6670			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   6671			BPF_MOV32_IMM(R0, 2),
   6672			BPF_EXIT_INSN(),
   6673			BPF_MOV32_IMM(R0, 1),
   6674			BPF_EXIT_INSN(),
   6675		},
   6676		INTERNAL,
   6677		{ },
   6678		{ { 0, 0x1 } },
   6679	},
   6680	{
   6681		"ALU64_XOR_K: 0x0000ffffffff0000 ^ -1 = 0xffff00000000ffff",
   6682		.u.insns_int = {
   6683			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
   6684			BPF_LD_IMM64(R3, 0xffff00000000ffffLL),
   6685			BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
   6686			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   6687			BPF_MOV32_IMM(R0, 2),
   6688			BPF_EXIT_INSN(),
   6689			BPF_MOV32_IMM(R0, 1),
   6690			BPF_EXIT_INSN(),
   6691		},
   6692		INTERNAL,
   6693		{ },
   6694		{ { 0, 0x1 } },
   6695	},
   6696	{
    6697		"ALU64_XOR_K: 0x0000000000000000 ^ -1 = 0xffffffffffffffff",
   6698		.u.insns_int = {
   6699			BPF_LD_IMM64(R2, 0x0000000000000000LL),
   6700			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
   6701			BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
   6702			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   6703			BPF_MOV32_IMM(R0, 2),
   6704			BPF_EXIT_INSN(),
   6705			BPF_MOV32_IMM(R0, 1),
   6706			BPF_EXIT_INSN(),
   6707		},
   6708		INTERNAL,
   6709		{ },
   6710		{ { 0, 0x1 } },
   6711	},
   6712	{
   6713		"ALU64_XOR_K: Sign extension 1",
   6714		.u.insns_int = {
   6715			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   6716			BPF_LD_IMM64(R1, 0x0123456786a4c2e0LL),
   6717			BPF_ALU64_IMM(BPF_XOR, R0, 0x0f0f0f0f),
   6718			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
   6719			BPF_MOV32_IMM(R0, 2),
   6720			BPF_EXIT_INSN(),
   6721			BPF_MOV32_IMM(R0, 1),
   6722			BPF_EXIT_INSN(),
   6723		},
   6724		INTERNAL,
   6725		{ },
   6726		{ { 0, 1 } }
   6727	},
   6728	{
   6729		"ALU64_XOR_K: Sign extension 2",
   6730		.u.insns_int = {
   6731			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   6732			BPF_LD_IMM64(R1, 0xfedcba98795b3d1fLL),
   6733			BPF_ALU64_IMM(BPF_XOR, R0, 0xf0f0f0f0),
   6734			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
   6735			BPF_MOV32_IMM(R0, 2),
   6736			BPF_EXIT_INSN(),
   6737			BPF_MOV32_IMM(R0, 1),
   6738			BPF_EXIT_INSN(),
   6739		},
   6740		INTERNAL,
   6741		{ },
   6742		{ { 0, 1 } }
   6743	},
   6744	/* BPF_ALU | BPF_LSH | BPF_X */
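	/*
	 * The 64-bit shift tests check the low and high result words
	 * separately for shift counts below, equal to and above 32,
	 * presumably to catch JITs that lower 64-bit shifts to pairs of
	 * 32-bit operations.
	 */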
   6745	{
   6746		"ALU_LSH_X: 1 << 1 = 2",
   6747		.u.insns_int = {
   6748			BPF_LD_IMM64(R0, 1),
   6749			BPF_ALU32_IMM(BPF_MOV, R1, 1),
   6750			BPF_ALU32_REG(BPF_LSH, R0, R1),
   6751			BPF_EXIT_INSN(),
   6752		},
   6753		INTERNAL,
   6754		{ },
   6755		{ { 0, 2 } },
   6756	},
   6757	{
   6758		"ALU_LSH_X: 1 << 31 = 0x80000000",
   6759		.u.insns_int = {
   6760			BPF_LD_IMM64(R0, 1),
   6761			BPF_ALU32_IMM(BPF_MOV, R1, 31),
   6762			BPF_ALU32_REG(BPF_LSH, R0, R1),
   6763			BPF_EXIT_INSN(),
   6764		},
   6765		INTERNAL,
   6766		{ },
   6767		{ { 0, 0x80000000 } },
   6768	},
   6769	{
   6770		"ALU_LSH_X: 0x12345678 << 12 = 0x45678000",
   6771		.u.insns_int = {
   6772			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
   6773			BPF_ALU32_IMM(BPF_MOV, R1, 12),
   6774			BPF_ALU32_REG(BPF_LSH, R0, R1),
   6775			BPF_EXIT_INSN(),
   6776		},
   6777		INTERNAL,
   6778		{ },
   6779		{ { 0, 0x45678000 } }
   6780	},
   6781	{
   6782		"ALU64_LSH_X: 1 << 1 = 2",
   6783		.u.insns_int = {
   6784			BPF_LD_IMM64(R0, 1),
   6785			BPF_ALU32_IMM(BPF_MOV, R1, 1),
   6786			BPF_ALU64_REG(BPF_LSH, R0, R1),
   6787			BPF_EXIT_INSN(),
   6788		},
   6789		INTERNAL,
   6790		{ },
   6791		{ { 0, 2 } },
   6792	},
   6793	{
   6794		"ALU64_LSH_X: 1 << 31 = 0x80000000",
   6795		.u.insns_int = {
   6796			BPF_LD_IMM64(R0, 1),
   6797			BPF_ALU32_IMM(BPF_MOV, R1, 31),
   6798			BPF_ALU64_REG(BPF_LSH, R0, R1),
   6799			BPF_EXIT_INSN(),
   6800		},
   6801		INTERNAL,
   6802		{ },
   6803		{ { 0, 0x80000000 } },
   6804	},
   6805	{
   6806		"ALU64_LSH_X: Shift < 32, low word",
   6807		.u.insns_int = {
   6808			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   6809			BPF_ALU32_IMM(BPF_MOV, R1, 12),
   6810			BPF_ALU64_REG(BPF_LSH, R0, R1),
   6811			BPF_EXIT_INSN(),
   6812		},
   6813		INTERNAL,
   6814		{ },
   6815		{ { 0, 0xbcdef000 } }
   6816	},
   6817	{
   6818		"ALU64_LSH_X: Shift < 32, high word",
   6819		.u.insns_int = {
   6820			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   6821			BPF_ALU32_IMM(BPF_MOV, R1, 12),
   6822			BPF_ALU64_REG(BPF_LSH, R0, R1),
   6823			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   6824			BPF_EXIT_INSN(),
   6825		},
   6826		INTERNAL,
   6827		{ },
   6828		{ { 0, 0x3456789a } }
   6829	},
   6830	{
   6831		"ALU64_LSH_X: Shift > 32, low word",
   6832		.u.insns_int = {
   6833			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   6834			BPF_ALU32_IMM(BPF_MOV, R1, 36),
   6835			BPF_ALU64_REG(BPF_LSH, R0, R1),
   6836			BPF_EXIT_INSN(),
   6837		},
   6838		INTERNAL,
   6839		{ },
   6840		{ { 0, 0 } }
   6841	},
   6842	{
   6843		"ALU64_LSH_X: Shift > 32, high word",
   6844		.u.insns_int = {
   6845			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   6846			BPF_ALU32_IMM(BPF_MOV, R1, 36),
   6847			BPF_ALU64_REG(BPF_LSH, R0, R1),
   6848			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   6849			BPF_EXIT_INSN(),
   6850		},
   6851		INTERNAL,
   6852		{ },
   6853		{ { 0, 0x9abcdef0 } }
   6854	},
   6855	{
   6856		"ALU64_LSH_X: Shift == 32, low word",
   6857		.u.insns_int = {
   6858			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   6859			BPF_ALU32_IMM(BPF_MOV, R1, 32),
   6860			BPF_ALU64_REG(BPF_LSH, R0, R1),
   6861			BPF_EXIT_INSN(),
   6862		},
   6863		INTERNAL,
   6864		{ },
   6865		{ { 0, 0 } }
   6866	},
   6867	{
   6868		"ALU64_LSH_X: Shift == 32, high word",
   6869		.u.insns_int = {
   6870			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   6871			BPF_ALU32_IMM(BPF_MOV, R1, 32),
   6872			BPF_ALU64_REG(BPF_LSH, R0, R1),
   6873			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   6874			BPF_EXIT_INSN(),
   6875		},
   6876		INTERNAL,
   6877		{ },
   6878		{ { 0, 0x89abcdef } }
   6879	},
   6880	{
   6881		"ALU64_LSH_X: Zero shift, low word",
   6882		.u.insns_int = {
   6883			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   6884			BPF_ALU32_IMM(BPF_MOV, R1, 0),
   6885			BPF_ALU64_REG(BPF_LSH, R0, R1),
   6886			BPF_EXIT_INSN(),
   6887		},
   6888		INTERNAL,
   6889		{ },
   6890		{ { 0, 0x89abcdef } }
   6891	},
   6892	{
   6893		"ALU64_LSH_X: Zero shift, high word",
   6894		.u.insns_int = {
   6895			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   6896			BPF_ALU32_IMM(BPF_MOV, R1, 0),
   6897			BPF_ALU64_REG(BPF_LSH, R0, R1),
   6898			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   6899			BPF_EXIT_INSN(),
   6900		},
   6901		INTERNAL,
   6902		{ },
   6903		{ { 0, 0x01234567 } }
   6904	},
   6905	/* BPF_ALU | BPF_LSH | BPF_K */
   6906	{
   6907		"ALU_LSH_K: 1 << 1 = 2",
   6908		.u.insns_int = {
   6909			BPF_LD_IMM64(R0, 1),
   6910			BPF_ALU32_IMM(BPF_LSH, R0, 1),
   6911			BPF_EXIT_INSN(),
   6912		},
   6913		INTERNAL,
   6914		{ },
   6915		{ { 0, 2 } },
   6916	},
   6917	{
   6918		"ALU_LSH_K: 1 << 31 = 0x80000000",
   6919		.u.insns_int = {
   6920			BPF_LD_IMM64(R0, 1),
   6921			BPF_ALU32_IMM(BPF_LSH, R0, 31),
   6922			BPF_EXIT_INSN(),
   6923		},
   6924		INTERNAL,
   6925		{ },
   6926		{ { 0, 0x80000000 } },
   6927	},
   6928	{
   6929		"ALU_LSH_K: 0x12345678 << 12 = 0x45678000",
   6930		.u.insns_int = {
   6931			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
   6932			BPF_ALU32_IMM(BPF_LSH, R0, 12),
   6933			BPF_EXIT_INSN(),
   6934		},
   6935		INTERNAL,
   6936		{ },
   6937		{ { 0, 0x45678000 } }
   6938	},
   6939	{
   6940		"ALU_LSH_K: 0x12345678 << 0 = 0x12345678",
   6941		.u.insns_int = {
   6942			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
   6943			BPF_ALU32_IMM(BPF_LSH, R0, 0),
   6944			BPF_EXIT_INSN(),
   6945		},
   6946		INTERNAL,
   6947		{ },
   6948		{ { 0, 0x12345678 } }
   6949	},
   6950	{
   6951		"ALU64_LSH_K: 1 << 1 = 2",
   6952		.u.insns_int = {
   6953			BPF_LD_IMM64(R0, 1),
   6954			BPF_ALU64_IMM(BPF_LSH, R0, 1),
   6955			BPF_EXIT_INSN(),
   6956		},
   6957		INTERNAL,
   6958		{ },
   6959		{ { 0, 2 } },
   6960	},
   6961	{
   6962		"ALU64_LSH_K: 1 << 31 = 0x80000000",
   6963		.u.insns_int = {
   6964			BPF_LD_IMM64(R0, 1),
   6965			BPF_ALU64_IMM(BPF_LSH, R0, 31),
   6966			BPF_EXIT_INSN(),
   6967		},
   6968		INTERNAL,
   6969		{ },
   6970		{ { 0, 0x80000000 } },
   6971	},
   6972	{
   6973		"ALU64_LSH_K: Shift < 32, low word",
   6974		.u.insns_int = {
   6975			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   6976			BPF_ALU64_IMM(BPF_LSH, R0, 12),
   6977			BPF_EXIT_INSN(),
   6978		},
   6979		INTERNAL,
   6980		{ },
   6981		{ { 0, 0xbcdef000 } }
   6982	},
   6983	{
   6984		"ALU64_LSH_K: Shift < 32, high word",
   6985		.u.insns_int = {
   6986			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   6987			BPF_ALU64_IMM(BPF_LSH, R0, 12),
   6988			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   6989			BPF_EXIT_INSN(),
   6990		},
   6991		INTERNAL,
   6992		{ },
   6993		{ { 0, 0x3456789a } }
   6994	},
   6995	{
   6996		"ALU64_LSH_K: Shift > 32, low word",
   6997		.u.insns_int = {
   6998			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   6999			BPF_ALU64_IMM(BPF_LSH, R0, 36),
   7000			BPF_EXIT_INSN(),
   7001		},
   7002		INTERNAL,
   7003		{ },
   7004		{ { 0, 0 } }
   7005	},
   7006	{
   7007		"ALU64_LSH_K: Shift > 32, high word",
   7008		.u.insns_int = {
   7009			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   7010			BPF_ALU64_IMM(BPF_LSH, R0, 36),
   7011			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7012			BPF_EXIT_INSN(),
   7013		},
   7014		INTERNAL,
   7015		{ },
   7016		{ { 0, 0x9abcdef0 } }
   7017	},
   7018	{
   7019		"ALU64_LSH_K: Shift == 32, low word",
   7020		.u.insns_int = {
   7021			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   7022			BPF_ALU64_IMM(BPF_LSH, R0, 32),
   7023			BPF_EXIT_INSN(),
   7024		},
   7025		INTERNAL,
   7026		{ },
   7027		{ { 0, 0 } }
   7028	},
   7029	{
   7030		"ALU64_LSH_K: Shift == 32, high word",
   7031		.u.insns_int = {
   7032			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   7033			BPF_ALU64_IMM(BPF_LSH, R0, 32),
   7034			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7035			BPF_EXIT_INSN(),
   7036		},
   7037		INTERNAL,
   7038		{ },
   7039		{ { 0, 0x89abcdef } }
   7040	},
   7041	{
   7042		"ALU64_LSH_K: Zero shift",
   7043		.u.insns_int = {
   7044			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   7045			BPF_ALU64_IMM(BPF_LSH, R0, 0),
   7046			BPF_EXIT_INSN(),
   7047		},
   7048		INTERNAL,
   7049		{ },
   7050		{ { 0, 0x89abcdef } }
   7051	},
   7052	/* BPF_ALU | BPF_RSH | BPF_X */
   7053	{
   7054		"ALU_RSH_X: 2 >> 1 = 1",
   7055		.u.insns_int = {
   7056			BPF_LD_IMM64(R0, 2),
   7057			BPF_ALU32_IMM(BPF_MOV, R1, 1),
   7058			BPF_ALU32_REG(BPF_RSH, R0, R1),
   7059			BPF_EXIT_INSN(),
   7060		},
   7061		INTERNAL,
   7062		{ },
   7063		{ { 0, 1 } },
   7064	},
   7065	{
   7066		"ALU_RSH_X: 0x80000000 >> 31 = 1",
   7067		.u.insns_int = {
   7068			BPF_LD_IMM64(R0, 0x80000000),
   7069			BPF_ALU32_IMM(BPF_MOV, R1, 31),
   7070			BPF_ALU32_REG(BPF_RSH, R0, R1),
   7071			BPF_EXIT_INSN(),
   7072		},
   7073		INTERNAL,
   7074		{ },
   7075		{ { 0, 1 } },
   7076	},
   7077	{
   7078		"ALU_RSH_X: 0x12345678 >> 20 = 0x123",
   7079		.u.insns_int = {
   7080			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
   7081			BPF_ALU32_IMM(BPF_MOV, R1, 20),
   7082			BPF_ALU32_REG(BPF_RSH, R0, R1),
   7083			BPF_EXIT_INSN(),
   7084		},
   7085		INTERNAL,
   7086		{ },
   7087		{ { 0, 0x123 } }
   7088	},
   7089	{
   7090		"ALU64_RSH_X: 2 >> 1 = 1",
   7091		.u.insns_int = {
   7092			BPF_LD_IMM64(R0, 2),
   7093			BPF_ALU32_IMM(BPF_MOV, R1, 1),
   7094			BPF_ALU64_REG(BPF_RSH, R0, R1),
   7095			BPF_EXIT_INSN(),
   7096		},
   7097		INTERNAL,
   7098		{ },
   7099		{ { 0, 1 } },
   7100	},
   7101	{
   7102		"ALU64_RSH_X: 0x80000000 >> 31 = 1",
   7103		.u.insns_int = {
   7104			BPF_LD_IMM64(R0, 0x80000000),
   7105			BPF_ALU32_IMM(BPF_MOV, R1, 31),
   7106			BPF_ALU64_REG(BPF_RSH, R0, R1),
   7107			BPF_EXIT_INSN(),
   7108		},
   7109		INTERNAL,
   7110		{ },
   7111		{ { 0, 1 } },
   7112	},
   7113	{
   7114		"ALU64_RSH_X: Shift < 32, low word",
   7115		.u.insns_int = {
   7116			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7117			BPF_ALU32_IMM(BPF_MOV, R1, 12),
   7118			BPF_ALU64_REG(BPF_RSH, R0, R1),
   7119			BPF_EXIT_INSN(),
   7120		},
   7121		INTERNAL,
   7122		{ },
   7123		{ { 0, 0x56789abc } }
   7124	},
   7125	{
   7126		"ALU64_RSH_X: Shift < 32, high word",
   7127		.u.insns_int = {
   7128			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7129			BPF_ALU32_IMM(BPF_MOV, R1, 12),
   7130			BPF_ALU64_REG(BPF_RSH, R0, R1),
   7131			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7132			BPF_EXIT_INSN(),
   7133		},
   7134		INTERNAL,
   7135		{ },
   7136		{ { 0, 0x00081234 } }
   7137	},
   7138	{
   7139		"ALU64_RSH_X: Shift > 32, low word",
   7140		.u.insns_int = {
   7141			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7142			BPF_ALU32_IMM(BPF_MOV, R1, 36),
   7143			BPF_ALU64_REG(BPF_RSH, R0, R1),
   7144			BPF_EXIT_INSN(),
   7145		},
   7146		INTERNAL,
   7147		{ },
   7148		{ { 0, 0x08123456 } }
   7149	},
   7150	{
   7151		"ALU64_RSH_X: Shift > 32, high word",
   7152		.u.insns_int = {
   7153			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7154			BPF_ALU32_IMM(BPF_MOV, R1, 36),
   7155			BPF_ALU64_REG(BPF_RSH, R0, R1),
   7156			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7157			BPF_EXIT_INSN(),
   7158		},
   7159		INTERNAL,
   7160		{ },
   7161		{ { 0, 0 } }
   7162	},
   7163	{
   7164		"ALU64_RSH_X: Shift == 32, low word",
   7165		.u.insns_int = {
   7166			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7167			BPF_ALU32_IMM(BPF_MOV, R1, 32),
   7168			BPF_ALU64_REG(BPF_RSH, R0, R1),
   7169			BPF_EXIT_INSN(),
   7170		},
   7171		INTERNAL,
   7172		{ },
   7173		{ { 0, 0x81234567 } }
   7174	},
   7175	{
   7176		"ALU64_RSH_X: Shift == 32, high word",
   7177		.u.insns_int = {
   7178			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7179			BPF_ALU32_IMM(BPF_MOV, R1, 32),
   7180			BPF_ALU64_REG(BPF_RSH, R0, R1),
   7181			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7182			BPF_EXIT_INSN(),
   7183		},
   7184		INTERNAL,
   7185		{ },
   7186		{ { 0, 0 } }
   7187	},
   7188	{
   7189		"ALU64_RSH_X: Zero shift, low word",
   7190		.u.insns_int = {
   7191			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7192			BPF_ALU32_IMM(BPF_MOV, R1, 0),
   7193			BPF_ALU64_REG(BPF_RSH, R0, R1),
   7194			BPF_EXIT_INSN(),
   7195		},
   7196		INTERNAL,
   7197		{ },
   7198		{ { 0, 0x89abcdef } }
   7199	},
   7200	{
   7201		"ALU64_RSH_X: Zero shift, high word",
   7202		.u.insns_int = {
   7203			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7204			BPF_ALU32_IMM(BPF_MOV, R1, 0),
   7205			BPF_ALU64_REG(BPF_RSH, R0, R1),
   7206			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7207			BPF_EXIT_INSN(),
   7208		},
   7209		INTERNAL,
   7210		{ },
   7211		{ { 0, 0x81234567 } }
   7212	},
   7213	/* BPF_ALU | BPF_RSH | BPF_K */
   7214	{
   7215		"ALU_RSH_K: 2 >> 1 = 1",
   7216		.u.insns_int = {
   7217			BPF_LD_IMM64(R0, 2),
   7218			BPF_ALU32_IMM(BPF_RSH, R0, 1),
   7219			BPF_EXIT_INSN(),
   7220		},
   7221		INTERNAL,
   7222		{ },
   7223		{ { 0, 1 } },
   7224	},
   7225	{
   7226		"ALU_RSH_K: 0x80000000 >> 31 = 1",
   7227		.u.insns_int = {
   7228			BPF_LD_IMM64(R0, 0x80000000),
   7229			BPF_ALU32_IMM(BPF_RSH, R0, 31),
   7230			BPF_EXIT_INSN(),
   7231		},
   7232		INTERNAL,
   7233		{ },
   7234		{ { 0, 1 } },
   7235	},
   7236	{
   7237		"ALU_RSH_K: 0x12345678 >> 20 = 0x123",
   7238		.u.insns_int = {
   7239			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
   7240			BPF_ALU32_IMM(BPF_RSH, R0, 20),
   7241			BPF_EXIT_INSN(),
   7242		},
   7243		INTERNAL,
   7244		{ },
   7245		{ { 0, 0x123 } }
   7246	},
   7247	{
   7248		"ALU_RSH_K: 0x12345678 >> 0 = 0x12345678",
   7249		.u.insns_int = {
   7250			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
   7251			BPF_ALU32_IMM(BPF_RSH, R0, 0),
   7252			BPF_EXIT_INSN(),
   7253		},
   7254		INTERNAL,
   7255		{ },
   7256		{ { 0, 0x12345678 } }
   7257	},
   7258	{
   7259		"ALU64_RSH_K: 2 >> 1 = 1",
   7260		.u.insns_int = {
   7261			BPF_LD_IMM64(R0, 2),
   7262			BPF_ALU64_IMM(BPF_RSH, R0, 1),
   7263			BPF_EXIT_INSN(),
   7264		},
   7265		INTERNAL,
   7266		{ },
   7267		{ { 0, 1 } },
   7268	},
   7269	{
   7270		"ALU64_RSH_K: 0x80000000 >> 31 = 1",
   7271		.u.insns_int = {
   7272			BPF_LD_IMM64(R0, 0x80000000),
   7273			BPF_ALU64_IMM(BPF_RSH, R0, 31),
   7274			BPF_EXIT_INSN(),
   7275		},
   7276		INTERNAL,
   7277		{ },
   7278		{ { 0, 1 } },
   7279	},
   7280	{
   7281		"ALU64_RSH_K: Shift < 32, low word",
   7282		.u.insns_int = {
   7283			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7284			BPF_ALU64_IMM(BPF_RSH, R0, 12),
   7285			BPF_EXIT_INSN(),
   7286		},
   7287		INTERNAL,
   7288		{ },
   7289		{ { 0, 0x56789abc } }
   7290	},
   7291	{
   7292		"ALU64_RSH_K: Shift < 32, high word",
   7293		.u.insns_int = {
   7294			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7295			BPF_ALU64_IMM(BPF_RSH, R0, 12),
   7296			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7297			BPF_EXIT_INSN(),
   7298		},
   7299		INTERNAL,
   7300		{ },
   7301		{ { 0, 0x00081234 } }
   7302	},
   7303	{
   7304		"ALU64_RSH_K: Shift > 32, low word",
   7305		.u.insns_int = {
   7306			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7307			BPF_ALU64_IMM(BPF_RSH, R0, 36),
   7308			BPF_EXIT_INSN(),
   7309		},
   7310		INTERNAL,
   7311		{ },
   7312		{ { 0, 0x08123456 } }
   7313	},
   7314	{
   7315		"ALU64_RSH_K: Shift > 32, high word",
   7316		.u.insns_int = {
   7317			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7318			BPF_ALU64_IMM(BPF_RSH, R0, 36),
   7319			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7320			BPF_EXIT_INSN(),
   7321		},
   7322		INTERNAL,
   7323		{ },
   7324		{ { 0, 0 } }
   7325	},
   7326	{
   7327		"ALU64_RSH_K: Shift == 32, low word",
   7328		.u.insns_int = {
   7329			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7330			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7331			BPF_EXIT_INSN(),
   7332		},
   7333		INTERNAL,
   7334		{ },
   7335		{ { 0, 0x81234567 } }
   7336	},
   7337	{
   7338		"ALU64_RSH_K: Shift == 32, high word",
   7339		.u.insns_int = {
   7340			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7341			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7342			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7343			BPF_EXIT_INSN(),
   7344		},
   7345		INTERNAL,
   7346		{ },
   7347		{ { 0, 0 } }
   7348	},
   7349	{
   7350		"ALU64_RSH_K: Zero shift",
   7351		.u.insns_int = {
   7352			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   7353			BPF_ALU64_IMM(BPF_RSH, R0, 0),
   7354			BPF_EXIT_INSN(),
   7355		},
   7356		INTERNAL,
   7357		{ },
   7358		{ { 0, 0x89abcdef } }
   7359	},
   7360	/* BPF_ALU | BPF_ARSH | BPF_X */
   7361	{
   7362		"ALU32_ARSH_X: -1234 >> 7 = -10",
   7363		.u.insns_int = {
   7364			BPF_ALU32_IMM(BPF_MOV, R0, -1234),
   7365			BPF_ALU32_IMM(BPF_MOV, R1, 7),
   7366			BPF_ALU32_REG(BPF_ARSH, R0, R1),
   7367			BPF_EXIT_INSN(),
   7368		},
   7369		INTERNAL,
   7370		{ },
   7371		{ { 0, -10 } }
   7372	},
   7373	{
   7374		"ALU64_ARSH_X: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
   7375		.u.insns_int = {
   7376			BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
   7377			BPF_ALU32_IMM(BPF_MOV, R1, 40),
   7378			BPF_ALU64_REG(BPF_ARSH, R0, R1),
   7379			BPF_EXIT_INSN(),
   7380		},
   7381		INTERNAL,
   7382		{ },
   7383		{ { 0, 0xffff00ff } },
   7384	},
   7385	{
   7386		"ALU64_ARSH_X: Shift < 32, low word",
   7387		.u.insns_int = {
   7388			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7389			BPF_ALU32_IMM(BPF_MOV, R1, 12),
   7390			BPF_ALU64_REG(BPF_ARSH, R0, R1),
   7391			BPF_EXIT_INSN(),
   7392		},
   7393		INTERNAL,
   7394		{ },
   7395		{ { 0, 0x56789abc } }
   7396	},
   7397	{
   7398		"ALU64_ARSH_X: Shift < 32, high word",
   7399		.u.insns_int = {
   7400			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7401			BPF_ALU32_IMM(BPF_MOV, R1, 12),
   7402			BPF_ALU64_REG(BPF_ARSH, R0, R1),
   7403			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7404			BPF_EXIT_INSN(),
   7405		},
   7406		INTERNAL,
   7407		{ },
   7408		{ { 0, 0xfff81234 } }
   7409	},
   7410	{
   7411		"ALU64_ARSH_X: Shift > 32, low word",
   7412		.u.insns_int = {
   7413			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7414			BPF_ALU32_IMM(BPF_MOV, R1, 36),
   7415			BPF_ALU64_REG(BPF_ARSH, R0, R1),
   7416			BPF_EXIT_INSN(),
   7417		},
   7418		INTERNAL,
   7419		{ },
   7420		{ { 0, 0xf8123456 } }
   7421	},
   7422	{
   7423		"ALU64_ARSH_X: Shift > 32, high word",
   7424		.u.insns_int = {
   7425			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7426			BPF_ALU32_IMM(BPF_MOV, R1, 36),
   7427			BPF_ALU64_REG(BPF_ARSH, R0, R1),
   7428			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7429			BPF_EXIT_INSN(),
   7430		},
   7431		INTERNAL,
   7432		{ },
   7433		{ { 0, -1 } }
   7434	},
   7435	{
   7436		"ALU64_ARSH_X: Shift == 32, low word",
   7437		.u.insns_int = {
   7438			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7439			BPF_ALU32_IMM(BPF_MOV, R1, 32),
   7440			BPF_ALU64_REG(BPF_ARSH, R0, R1),
   7441			BPF_EXIT_INSN(),
   7442		},
   7443		INTERNAL,
   7444		{ },
   7445		{ { 0, 0x81234567 } }
   7446	},
   7447	{
   7448		"ALU64_ARSH_X: Shift == 32, high word",
   7449		.u.insns_int = {
   7450			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7451			BPF_ALU32_IMM(BPF_MOV, R1, 32),
   7452			BPF_ALU64_REG(BPF_ARSH, R0, R1),
   7453			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7454			BPF_EXIT_INSN(),
   7455		},
   7456		INTERNAL,
   7457		{ },
   7458		{ { 0, -1 } }
   7459	},
   7460	{
   7461		"ALU64_ARSH_X: Zero shift, low word",
   7462		.u.insns_int = {
   7463			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7464			BPF_ALU32_IMM(BPF_MOV, R1, 0),
   7465			BPF_ALU64_REG(BPF_ARSH, R0, R1),
   7466			BPF_EXIT_INSN(),
   7467		},
   7468		INTERNAL,
   7469		{ },
   7470		{ { 0, 0x89abcdef } }
   7471	},
   7472	{
   7473		"ALU64_ARSH_X: Zero shift, high word",
   7474		.u.insns_int = {
   7475			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7476			BPF_ALU32_IMM(BPF_MOV, R1, 0),
   7477			BPF_ALU64_REG(BPF_ARSH, R0, R1),
   7478			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7479			BPF_EXIT_INSN(),
   7480		},
   7481		INTERNAL,
   7482		{ },
   7483		{ { 0, 0x81234567 } }
   7484	},
   7485	/* BPF_ALU | BPF_ARSH | BPF_K */
   7486	{
   7487		"ALU32_ARSH_K: -1234 >> 7 = -10",
   7488		.u.insns_int = {
   7489			BPF_ALU32_IMM(BPF_MOV, R0, -1234),
   7490			BPF_ALU32_IMM(BPF_ARSH, R0, 7),
   7491			BPF_EXIT_INSN(),
   7492		},
   7493		INTERNAL,
   7494		{ },
   7495		{ { 0, -10 } }
   7496	},
   7497	{
   7498		"ALU32_ARSH_K: -1234 >> 0 = -1234",
   7499		.u.insns_int = {
   7500			BPF_ALU32_IMM(BPF_MOV, R0, -1234),
   7501			BPF_ALU32_IMM(BPF_ARSH, R0, 0),
   7502			BPF_EXIT_INSN(),
   7503		},
   7504		INTERNAL,
   7505		{ },
   7506		{ { 0, -1234 } }
   7507	},
   7508	{
   7509		"ALU64_ARSH_K: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
   7510		.u.insns_int = {
   7511			BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
   7512			BPF_ALU64_IMM(BPF_ARSH, R0, 40),
   7513			BPF_EXIT_INSN(),
   7514		},
   7515		INTERNAL,
   7516		{ },
   7517		{ { 0, 0xffff00ff } },
   7518	},
   7519	{
   7520		"ALU64_ARSH_K: Shift < 32, low word",
   7521		.u.insns_int = {
   7522			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
    7523			BPF_ALU64_IMM(BPF_ARSH, R0, 12),
   7524			BPF_EXIT_INSN(),
   7525		},
   7526		INTERNAL,
   7527		{ },
   7528		{ { 0, 0x56789abc } }
   7529	},
   7530	{
   7531		"ALU64_ARSH_K: Shift < 32, high word",
   7532		.u.insns_int = {
   7533			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7534			BPF_ALU64_IMM(BPF_ARSH, R0, 12),
   7535			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7536			BPF_EXIT_INSN(),
   7537		},
   7538		INTERNAL,
   7539		{ },
   7540		{ { 0, 0xfff81234 } }
   7541	},
   7542	{
   7543		"ALU64_ARSH_K: Shift > 32, low word",
   7544		.u.insns_int = {
   7545			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7546			BPF_ALU64_IMM(BPF_ARSH, R0, 36),
   7547			BPF_EXIT_INSN(),
   7548		},
   7549		INTERNAL,
   7550		{ },
   7551		{ { 0, 0xf8123456 } }
   7552	},
   7553	{
   7554		"ALU64_ARSH_K: Shift > 32, high word",
   7555		.u.insns_int = {
   7556			BPF_LD_IMM64(R0, 0xf123456789abcdefLL),
   7557			BPF_ALU64_IMM(BPF_ARSH, R0, 36),
   7558			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7559			BPF_EXIT_INSN(),
   7560		},
   7561		INTERNAL,
   7562		{ },
   7563		{ { 0, -1 } }
   7564	},
   7565	{
   7566		"ALU64_ARSH_K: Shift == 32, low word",
   7567		.u.insns_int = {
   7568			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7569			BPF_ALU64_IMM(BPF_ARSH, R0, 32),
   7570			BPF_EXIT_INSN(),
   7571		},
   7572		INTERNAL,
   7573		{ },
   7574		{ { 0, 0x81234567 } }
   7575	},
   7576	{
   7577		"ALU64_ARSH_K: Shift == 32, high word",
   7578		.u.insns_int = {
   7579			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7580			BPF_ALU64_IMM(BPF_ARSH, R0, 32),
   7581			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7582			BPF_EXIT_INSN(),
   7583		},
   7584		INTERNAL,
   7585		{ },
   7586		{ { 0, -1 } }
   7587	},
   7588	{
   7589		"ALU64_ARSH_K: Zero shift",
   7590		.u.insns_int = {
   7591			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
   7592			BPF_ALU64_IMM(BPF_ARSH, R0, 0),
   7593			BPF_EXIT_INSN(),
   7594		},
   7595		INTERNAL,
   7596		{ },
   7597		{ { 0, 0x89abcdef } }
   7598	},
   7599	/* BPF_ALU | BPF_NEG */
   7600	{
   7601		"ALU_NEG: -(3) = -3",
   7602		.u.insns_int = {
   7603			BPF_ALU32_IMM(BPF_MOV, R0, 3),
   7604			BPF_ALU32_IMM(BPF_NEG, R0, 0),
   7605			BPF_EXIT_INSN(),
   7606		},
   7607		INTERNAL,
   7608		{ },
   7609		{ { 0, -3 } },
   7610	},
   7611	{
   7612		"ALU_NEG: -(-3) = 3",
   7613		.u.insns_int = {
   7614			BPF_ALU32_IMM(BPF_MOV, R0, -3),
   7615			BPF_ALU32_IMM(BPF_NEG, R0, 0),
   7616			BPF_EXIT_INSN(),
   7617		},
   7618		INTERNAL,
   7619		{ },
   7620		{ { 0, 3 } },
   7621	},
   7622	{
   7623		"ALU64_NEG: -(3) = -3",
   7624		.u.insns_int = {
   7625			BPF_LD_IMM64(R0, 3),
   7626			BPF_ALU64_IMM(BPF_NEG, R0, 0),
   7627			BPF_EXIT_INSN(),
   7628		},
   7629		INTERNAL,
   7630		{ },
   7631		{ { 0, -3 } },
   7632	},
   7633	{
   7634		"ALU64_NEG: -(-3) = 3",
   7635		.u.insns_int = {
   7636			BPF_LD_IMM64(R0, -3),
   7637			BPF_ALU64_IMM(BPF_NEG, R0, 0),
   7638			BPF_EXIT_INSN(),
   7639		},
   7640		INTERNAL,
   7641		{ },
   7642		{ { 0, 3 } },
   7643	},
   7644	/* BPF_ALU | BPF_END | BPF_FROM_BE */
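        	/*
        	 * The expected results below are wrapped in cpu_to_be*() and
        	 * cpu_to_le*() so that each case checks the right value on both
        	 * little- and big-endian hosts.
        	 */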
   7645	{
   7646		"ALU_END_FROM_BE 16: 0x0123456789abcdef -> 0xcdef",
   7647		.u.insns_int = {
   7648			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   7649			BPF_ENDIAN(BPF_FROM_BE, R0, 16),
   7650			BPF_EXIT_INSN(),
   7651		},
   7652		INTERNAL,
   7653		{ },
   7654		{ { 0,  cpu_to_be16(0xcdef) } },
   7655	},
   7656	{
   7657		"ALU_END_FROM_BE 32: 0x0123456789abcdef -> 0x89abcdef",
   7658		.u.insns_int = {
   7659			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   7660			BPF_ENDIAN(BPF_FROM_BE, R0, 32),
   7661			BPF_ALU64_REG(BPF_MOV, R1, R0),
   7662			BPF_ALU64_IMM(BPF_RSH, R1, 32),
   7663			BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
   7664			BPF_EXIT_INSN(),
   7665		},
   7666		INTERNAL,
   7667		{ },
   7668		{ { 0, cpu_to_be32(0x89abcdef) } },
   7669	},
   7670	{
   7671		"ALU_END_FROM_BE 64: 0x0123456789abcdef -> 0x89abcdef",
   7672		.u.insns_int = {
   7673			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   7674			BPF_ENDIAN(BPF_FROM_BE, R0, 64),
   7675			BPF_EXIT_INSN(),
   7676		},
   7677		INTERNAL,
   7678		{ },
   7679		{ { 0, (u32) cpu_to_be64(0x0123456789abcdefLL) } },
   7680	},
   7681	{
   7682		"ALU_END_FROM_BE 64: 0x0123456789abcdef >> 32 -> 0x01234567",
   7683		.u.insns_int = {
   7684			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   7685			BPF_ENDIAN(BPF_FROM_BE, R0, 64),
   7686			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7687			BPF_EXIT_INSN(),
   7688		},
   7689		INTERNAL,
   7690		{ },
   7691		{ { 0, (u32) (cpu_to_be64(0x0123456789abcdefLL) >> 32) } },
   7692	},
   7693	/* BPF_ALU | BPF_END | BPF_FROM_BE, reversed */
   7694	{
   7695		"ALU_END_FROM_BE 16: 0xfedcba9876543210 -> 0x3210",
   7696		.u.insns_int = {
   7697			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
   7698			BPF_ENDIAN(BPF_FROM_BE, R0, 16),
   7699			BPF_EXIT_INSN(),
   7700		},
   7701		INTERNAL,
   7702		{ },
   7703		{ { 0,  cpu_to_be16(0x3210) } },
   7704	},
   7705	{
   7706		"ALU_END_FROM_BE 32: 0xfedcba9876543210 -> 0x76543210",
   7707		.u.insns_int = {
   7708			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
   7709			BPF_ENDIAN(BPF_FROM_BE, R0, 32),
   7710			BPF_ALU64_REG(BPF_MOV, R1, R0),
   7711			BPF_ALU64_IMM(BPF_RSH, R1, 32),
   7712			BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
   7713			BPF_EXIT_INSN(),
   7714		},
   7715		INTERNAL,
   7716		{ },
   7717		{ { 0, cpu_to_be32(0x76543210) } },
   7718	},
   7719	{
   7720		"ALU_END_FROM_BE 64: 0xfedcba9876543210 -> 0x76543210",
   7721		.u.insns_int = {
   7722			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
   7723			BPF_ENDIAN(BPF_FROM_BE, R0, 64),
   7724			BPF_EXIT_INSN(),
   7725		},
   7726		INTERNAL,
   7727		{ },
   7728		{ { 0, (u32) cpu_to_be64(0xfedcba9876543210ULL) } },
   7729	},
   7730	{
   7731		"ALU_END_FROM_BE 64: 0xfedcba9876543210 >> 32 -> 0xfedcba98",
   7732		.u.insns_int = {
   7733			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
   7734			BPF_ENDIAN(BPF_FROM_BE, R0, 64),
   7735			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7736			BPF_EXIT_INSN(),
   7737		},
   7738		INTERNAL,
   7739		{ },
   7740		{ { 0, (u32) (cpu_to_be64(0xfedcba9876543210ULL) >> 32) } },
   7741	},
   7742	/* BPF_ALU | BPF_END | BPF_FROM_LE */
   7743	{
   7744		"ALU_END_FROM_LE 16: 0x0123456789abcdef -> 0xefcd",
   7745		.u.insns_int = {
   7746			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   7747			BPF_ENDIAN(BPF_FROM_LE, R0, 16),
   7748			BPF_EXIT_INSN(),
   7749		},
   7750		INTERNAL,
   7751		{ },
   7752		{ { 0, cpu_to_le16(0xcdef) } },
   7753	},
   7754	{
   7755		"ALU_END_FROM_LE 32: 0x0123456789abcdef -> 0xefcdab89",
   7756		.u.insns_int = {
   7757			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   7758			BPF_ENDIAN(BPF_FROM_LE, R0, 32),
   7759			BPF_ALU64_REG(BPF_MOV, R1, R0),
   7760			BPF_ALU64_IMM(BPF_RSH, R1, 32),
   7761			BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
   7762			BPF_EXIT_INSN(),
   7763		},
   7764		INTERNAL,
   7765		{ },
   7766		{ { 0, cpu_to_le32(0x89abcdef) } },
   7767	},
   7768	{
   7769		"ALU_END_FROM_LE 64: 0x0123456789abcdef -> 0x67452301",
   7770		.u.insns_int = {
   7771			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   7772			BPF_ENDIAN(BPF_FROM_LE, R0, 64),
   7773			BPF_EXIT_INSN(),
   7774		},
   7775		INTERNAL,
   7776		{ },
   7777		{ { 0, (u32) cpu_to_le64(0x0123456789abcdefLL) } },
   7778	},
   7779	{
   7780		"ALU_END_FROM_LE 64: 0x0123456789abcdef >> 32 -> 0xefcdab89",
   7781		.u.insns_int = {
   7782			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
   7783			BPF_ENDIAN(BPF_FROM_LE, R0, 64),
   7784			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7785			BPF_EXIT_INSN(),
   7786		},
   7787		INTERNAL,
   7788		{ },
   7789		{ { 0, (u32) (cpu_to_le64(0x0123456789abcdefLL) >> 32) } },
   7790	},
   7791	/* BPF_ALU | BPF_END | BPF_FROM_LE, reversed */
   7792	{
   7793		"ALU_END_FROM_LE 16: 0xfedcba9876543210 -> 0x1032",
   7794		.u.insns_int = {
   7795			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
   7796			BPF_ENDIAN(BPF_FROM_LE, R0, 16),
   7797			BPF_EXIT_INSN(),
   7798		},
   7799		INTERNAL,
   7800		{ },
   7801		{ { 0,  cpu_to_le16(0x3210) } },
   7802	},
   7803	{
   7804		"ALU_END_FROM_LE 32: 0xfedcba9876543210 -> 0x10325476",
   7805		.u.insns_int = {
   7806			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
   7807			BPF_ENDIAN(BPF_FROM_LE, R0, 32),
   7808			BPF_ALU64_REG(BPF_MOV, R1, R0),
   7809			BPF_ALU64_IMM(BPF_RSH, R1, 32),
   7810			BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
   7811			BPF_EXIT_INSN(),
   7812		},
   7813		INTERNAL,
   7814		{ },
   7815		{ { 0, cpu_to_le32(0x76543210) } },
   7816	},
   7817	{
    7818		"ALU_END_FROM_LE 64: 0xfedcba9876543210 -> 0x98badcfe",
   7819		.u.insns_int = {
   7820			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
   7821			BPF_ENDIAN(BPF_FROM_LE, R0, 64),
   7822			BPF_EXIT_INSN(),
   7823		},
   7824		INTERNAL,
   7825		{ },
   7826		{ { 0, (u32) cpu_to_le64(0xfedcba9876543210ULL) } },
   7827	},
   7828	{
    7829		"ALU_END_FROM_LE 64: 0xfedcba9876543210 >> 32 -> 0x10325476",
   7830		.u.insns_int = {
   7831			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
   7832			BPF_ENDIAN(BPF_FROM_LE, R0, 64),
   7833			BPF_ALU64_IMM(BPF_RSH, R0, 32),
   7834			BPF_EXIT_INSN(),
   7835		},
   7836		INTERNAL,
   7837		{ },
   7838		{ { 0, (u32) (cpu_to_le64(0xfedcba9876543210ULL) >> 32) } },
   7839	},
   7840	/* BPF_LDX_MEM B/H/W/DW */
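        	/*
        	 * The sub-word loads below read the least significant bytes of
        	 * the doubleword spilled at [R10 - 8]; those bytes sit at the
        	 * lowest stack offset on little-endian and at the highest on
        	 * big-endian, hence the #ifdef'd load offsets. On a mismatch the
        	 * JNE branch skips the zeroing MOV, so the bad value is returned.
        	 */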
   7841	{
   7842		"BPF_LDX_MEM | BPF_B, base",
   7843		.u.insns_int = {
   7844			BPF_LD_IMM64(R1, 0x0102030405060708ULL),
   7845			BPF_LD_IMM64(R2, 0x0000000000000008ULL),
   7846			BPF_STX_MEM(BPF_DW, R10, R1, -8),
   7847#ifdef __BIG_ENDIAN
   7848			BPF_LDX_MEM(BPF_B, R0, R10, -1),
   7849#else
   7850			BPF_LDX_MEM(BPF_B, R0, R10, -8),
   7851#endif
   7852			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
   7853			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   7854			BPF_EXIT_INSN(),
   7855		},
   7856		INTERNAL,
   7857		{ },
   7858		{ { 0, 0 } },
   7859		.stack_depth = 8,
   7860	},
   7861	{
   7862		"BPF_LDX_MEM | BPF_B, MSB set",
   7863		.u.insns_int = {
   7864			BPF_LD_IMM64(R1, 0x8182838485868788ULL),
   7865			BPF_LD_IMM64(R2, 0x0000000000000088ULL),
   7866			BPF_STX_MEM(BPF_DW, R10, R1, -8),
   7867#ifdef __BIG_ENDIAN
   7868			BPF_LDX_MEM(BPF_B, R0, R10, -1),
   7869#else
   7870			BPF_LDX_MEM(BPF_B, R0, R10, -8),
   7871#endif
   7872			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
   7873			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   7874			BPF_EXIT_INSN(),
   7875		},
   7876		INTERNAL,
   7877		{ },
   7878		{ { 0, 0 } },
   7879		.stack_depth = 8,
   7880	},
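        	/*
        	 * The FLAG_LARGE_MEM cases rely on the test runner passing R1 as
        	 * a pointer to a writable buffer of at least data_size bytes (the
        	 * first value in the test spec), so that stores and loads at
        	 * large positive and negative offsets stay within bounds.
        	 */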
   7881	{
   7882		"BPF_LDX_MEM | BPF_B, negative offset",
   7883		.u.insns_int = {
   7884			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
   7885			BPF_LD_IMM64(R3, 0x0000000000000088ULL),
   7886			BPF_ALU64_IMM(BPF_ADD, R1, 512),
   7887			BPF_STX_MEM(BPF_B, R1, R2, -256),
   7888			BPF_LDX_MEM(BPF_B, R0, R1, -256),
   7889			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
   7890			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   7891			BPF_EXIT_INSN(),
   7892		},
   7893		INTERNAL | FLAG_LARGE_MEM,
   7894		{ },
   7895		{ { 512, 0 } },
   7896		.stack_depth = 0,
   7897	},
   7898	{
   7899		"BPF_LDX_MEM | BPF_B, small positive offset",
   7900		.u.insns_int = {
   7901			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
   7902			BPF_LD_IMM64(R3, 0x0000000000000088ULL),
   7903			BPF_STX_MEM(BPF_B, R1, R2, 256),
   7904			BPF_LDX_MEM(BPF_B, R0, R1, 256),
   7905			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
   7906			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   7907			BPF_EXIT_INSN(),
   7908		},
   7909		INTERNAL | FLAG_LARGE_MEM,
   7910		{ },
   7911		{ { 512, 0 } },
   7912		.stack_depth = 0,
   7913	},
   7914	{
   7915		"BPF_LDX_MEM | BPF_B, large positive offset",
   7916		.u.insns_int = {
   7917			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
   7918			BPF_LD_IMM64(R3, 0x0000000000000088ULL),
   7919			BPF_STX_MEM(BPF_B, R1, R2, 4096),
   7920			BPF_LDX_MEM(BPF_B, R0, R1, 4096),
   7921			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
   7922			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   7923			BPF_EXIT_INSN(),
   7924		},
   7925		INTERNAL | FLAG_LARGE_MEM,
   7926		{ },
   7927		{ { 4096 + 16, 0 } },
   7928		.stack_depth = 0,
   7929	},
   7930	{
   7931		"BPF_LDX_MEM | BPF_H, base",
   7932		.u.insns_int = {
   7933			BPF_LD_IMM64(R1, 0x0102030405060708ULL),
   7934			BPF_LD_IMM64(R2, 0x0000000000000708ULL),
   7935			BPF_STX_MEM(BPF_DW, R10, R1, -8),
   7936#ifdef __BIG_ENDIAN
   7937			BPF_LDX_MEM(BPF_H, R0, R10, -2),
   7938#else
   7939			BPF_LDX_MEM(BPF_H, R0, R10, -8),
   7940#endif
   7941			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
   7942			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   7943			BPF_EXIT_INSN(),
   7944		},
   7945		INTERNAL,
   7946		{ },
   7947		{ { 0, 0 } },
   7948		.stack_depth = 8,
   7949	},
   7950	{
   7951		"BPF_LDX_MEM | BPF_H, MSB set",
   7952		.u.insns_int = {
   7953			BPF_LD_IMM64(R1, 0x8182838485868788ULL),
   7954			BPF_LD_IMM64(R2, 0x0000000000008788ULL),
   7955			BPF_STX_MEM(BPF_DW, R10, R1, -8),
   7956#ifdef __BIG_ENDIAN
   7957			BPF_LDX_MEM(BPF_H, R0, R10, -2),
   7958#else
   7959			BPF_LDX_MEM(BPF_H, R0, R10, -8),
   7960#endif
   7961			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
   7962			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   7963			BPF_EXIT_INSN(),
   7964		},
   7965		INTERNAL,
   7966		{ },
   7967		{ { 0, 0 } },
   7968		.stack_depth = 8,
   7969	},
   7970	{
   7971		"BPF_LDX_MEM | BPF_H, negative offset",
   7972		.u.insns_int = {
   7973			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
   7974			BPF_LD_IMM64(R3, 0x0000000000008788ULL),
   7975			BPF_ALU64_IMM(BPF_ADD, R1, 512),
   7976			BPF_STX_MEM(BPF_H, R1, R2, -256),
   7977			BPF_LDX_MEM(BPF_H, R0, R1, -256),
   7978			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
   7979			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   7980			BPF_EXIT_INSN(),
   7981		},
   7982		INTERNAL | FLAG_LARGE_MEM,
   7983		{ },
   7984		{ { 512, 0 } },
   7985		.stack_depth = 0,
   7986	},
   7987	{
   7988		"BPF_LDX_MEM | BPF_H, small positive offset",
   7989		.u.insns_int = {
   7990			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
   7991			BPF_LD_IMM64(R3, 0x0000000000008788ULL),
   7992			BPF_STX_MEM(BPF_H, R1, R2, 256),
   7993			BPF_LDX_MEM(BPF_H, R0, R1, 256),
   7994			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
   7995			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   7996			BPF_EXIT_INSN(),
   7997		},
   7998		INTERNAL | FLAG_LARGE_MEM,
   7999		{ },
   8000		{ { 512, 0 } },
   8001		.stack_depth = 0,
   8002	},
   8003	{
   8004		"BPF_LDX_MEM | BPF_H, large positive offset",
   8005		.u.insns_int = {
   8006			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
   8007			BPF_LD_IMM64(R3, 0x0000000000008788ULL),
   8008			BPF_STX_MEM(BPF_H, R1, R2, 8192),
   8009			BPF_LDX_MEM(BPF_H, R0, R1, 8192),
   8010			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
   8011			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   8012			BPF_EXIT_INSN(),
   8013		},
   8014		INTERNAL | FLAG_LARGE_MEM,
   8015		{ },
   8016		{ { 8192 + 16, 0 } },
   8017		.stack_depth = 0,
   8018	},
   8019	{
   8020		"BPF_LDX_MEM | BPF_H, unaligned positive offset",
   8021		.u.insns_int = {
   8022			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
   8023			BPF_LD_IMM64(R3, 0x0000000000008788ULL),
   8024			BPF_STX_MEM(BPF_H, R1, R2, 13),
   8025			BPF_LDX_MEM(BPF_H, R0, R1, 13),
   8026			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
   8027			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   8028			BPF_EXIT_INSN(),
   8029		},
   8030		INTERNAL | FLAG_LARGE_MEM,
   8031		{ },
   8032		{ { 32, 0 } },
   8033		.stack_depth = 0,
   8034	},
   8035	{
   8036		"BPF_LDX_MEM | BPF_W, base",
   8037		.u.insns_int = {
   8038			BPF_LD_IMM64(R1, 0x0102030405060708ULL),
   8039			BPF_LD_IMM64(R2, 0x0000000005060708ULL),
   8040			BPF_STX_MEM(BPF_DW, R10, R1, -8),
   8041#ifdef __BIG_ENDIAN
   8042			BPF_LDX_MEM(BPF_W, R0, R10, -4),
   8043#else
   8044			BPF_LDX_MEM(BPF_W, R0, R10, -8),
   8045#endif
   8046			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
   8047			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   8048			BPF_EXIT_INSN(),
   8049		},
   8050		INTERNAL,
   8051		{ },
   8052		{ { 0, 0 } },
   8053		.stack_depth = 8,
   8054	},
   8055	{
   8056		"BPF_LDX_MEM | BPF_W, MSB set",
   8057		.u.insns_int = {
   8058			BPF_LD_IMM64(R1, 0x8182838485868788ULL),
   8059			BPF_LD_IMM64(R2, 0x0000000085868788ULL),
   8060			BPF_STX_MEM(BPF_DW, R10, R1, -8),
   8061#ifdef __BIG_ENDIAN
   8062			BPF_LDX_MEM(BPF_W, R0, R10, -4),
   8063#else
   8064			BPF_LDX_MEM(BPF_W, R0, R10, -8),
   8065#endif
   8066			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
   8067			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   8068			BPF_EXIT_INSN(),
   8069		},
   8070		INTERNAL,
   8071		{ },
   8072		{ { 0, 0 } },
   8073		.stack_depth = 8,
   8074	},
   8075	{
   8076		"BPF_LDX_MEM | BPF_W, negative offset",
   8077		.u.insns_int = {
   8078			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
   8079			BPF_LD_IMM64(R3, 0x0000000085868788ULL),
   8080			BPF_ALU64_IMM(BPF_ADD, R1, 512),
   8081			BPF_STX_MEM(BPF_W, R1, R2, -256),
   8082			BPF_LDX_MEM(BPF_W, R0, R1, -256),
   8083			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
   8084			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   8085			BPF_EXIT_INSN(),
   8086		},
   8087		INTERNAL | FLAG_LARGE_MEM,
   8088		{ },
   8089		{ { 512, 0 } },
   8090		.stack_depth = 0,
   8091	},
   8092	{
   8093		"BPF_LDX_MEM | BPF_W, small positive offset",
   8094		.u.insns_int = {
   8095			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
   8096			BPF_LD_IMM64(R3, 0x0000000085868788ULL),
   8097			BPF_STX_MEM(BPF_W, R1, R2, 256),
   8098			BPF_LDX_MEM(BPF_W, R0, R1, 256),
   8099			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
   8100			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   8101			BPF_EXIT_INSN(),
   8102		},
   8103		INTERNAL | FLAG_LARGE_MEM,
   8104		{ },
   8105		{ { 512, 0 } },
   8106		.stack_depth = 0,
   8107	},
   8108	{
   8109		"BPF_LDX_MEM | BPF_W, large positive offset",
   8110		.u.insns_int = {
   8111			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
   8112			BPF_LD_IMM64(R3, 0x0000000085868788ULL),
   8113			BPF_STX_MEM(BPF_W, R1, R2, 16384),
   8114			BPF_LDX_MEM(BPF_W, R0, R1, 16384),
   8115			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
   8116			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   8117			BPF_EXIT_INSN(),
   8118		},
   8119		INTERNAL | FLAG_LARGE_MEM,
   8120		{ },
   8121		{ { 16384 + 16, 0 } },
   8122		.stack_depth = 0,
   8123	},
   8124	{
   8125		"BPF_LDX_MEM | BPF_W, unaligned positive offset",
   8126		.u.insns_int = {
   8127			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
   8128			BPF_LD_IMM64(R3, 0x0000000085868788ULL),
   8129			BPF_STX_MEM(BPF_W, R1, R2, 13),
   8130			BPF_LDX_MEM(BPF_W, R0, R1, 13),
   8131			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
   8132			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   8133			BPF_EXIT_INSN(),
   8134		},
   8135		INTERNAL | FLAG_LARGE_MEM,
   8136		{ },
   8137		{ { 32, 0 } },
   8138		.stack_depth = 0,
   8139	},
   8140	{
   8141		"BPF_LDX_MEM | BPF_DW, base",
   8142		.u.insns_int = {
   8143			BPF_LD_IMM64(R1, 0x0102030405060708ULL),
   8144			BPF_STX_MEM(BPF_DW, R10, R1, -8),
   8145			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
   8146			BPF_JMP_REG(BPF_JNE, R0, R1, 1),
   8147			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   8148			BPF_EXIT_INSN(),
   8149		},
   8150		INTERNAL,
   8151		{ },
   8152		{ { 0, 0 } },
   8153		.stack_depth = 8,
   8154	},
   8155	{
   8156		"BPF_LDX_MEM | BPF_DW, MSB set",
   8157		.u.insns_int = {
   8158			BPF_LD_IMM64(R1, 0x8182838485868788ULL),
   8159			BPF_STX_MEM(BPF_DW, R10, R1, -8),
   8160			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
   8161			BPF_JMP_REG(BPF_JNE, R0, R1, 1),
   8162			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   8163			BPF_EXIT_INSN(),
   8164		},
   8165		INTERNAL,
   8166		{ },
   8167		{ { 0, 0 } },
   8168		.stack_depth = 8,
   8169	},
   8170	{
   8171		"BPF_LDX_MEM | BPF_DW, negative offset",
   8172		.u.insns_int = {
   8173			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
   8174			BPF_ALU64_IMM(BPF_ADD, R1, 512),
   8175			BPF_STX_MEM(BPF_DW, R1, R2, -256),
   8176			BPF_LDX_MEM(BPF_DW, R0, R1, -256),
   8177			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
   8178			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   8179			BPF_EXIT_INSN(),
   8180		},
   8181		INTERNAL | FLAG_LARGE_MEM,
   8182		{ },
   8183		{ { 512, 0 } },
   8184		.stack_depth = 0,
   8185	},
   8186	{
   8187		"BPF_LDX_MEM | BPF_DW, small positive offset",
   8188		.u.insns_int = {
   8189			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
   8190			BPF_STX_MEM(BPF_DW, R1, R2, 256),
   8191			BPF_LDX_MEM(BPF_DW, R0, R1, 256),
   8192			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
   8193			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   8194			BPF_EXIT_INSN(),
   8195		},
   8196		INTERNAL | FLAG_LARGE_MEM,
   8197		{ },
   8198		{ { 512, 0 } },
   8199		.stack_depth = 8,
   8200	},
   8201	{
   8202		"BPF_LDX_MEM | BPF_DW, large positive offset",
   8203		.u.insns_int = {
   8204			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
   8205			BPF_STX_MEM(BPF_DW, R1, R2, 32760),
   8206			BPF_LDX_MEM(BPF_DW, R0, R1, 32760),
   8207			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
   8208			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   8209			BPF_EXIT_INSN(),
   8210		},
   8211		INTERNAL | FLAG_LARGE_MEM,
   8212		{ },
   8213		{ { 32768, 0 } },
   8214		.stack_depth = 0,
   8215	},
   8216	{
   8217		"BPF_LDX_MEM | BPF_DW, unaligned positive offset",
   8218		.u.insns_int = {
   8219			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
   8220			BPF_STX_MEM(BPF_DW, R1, R2, 13),
   8221			BPF_LDX_MEM(BPF_DW, R0, R1, 13),
   8222			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
   8223			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   8224			BPF_EXIT_INSN(),
   8225		},
   8226		INTERNAL | FLAG_LARGE_MEM,
   8227		{ },
   8228		{ { 32, 0 } },
   8229		.stack_depth = 0,
   8230	},
   8231	/* BPF_STX_MEM B/H/W/DW */
   8232	{
   8233		"BPF_STX_MEM | BPF_B",
   8234		.u.insns_int = {
   8235			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
   8236			BPF_LD_IMM64(R2, 0x0102030405060708ULL),
   8237			BPF_LD_IMM64(R3, 0x8090a0b0c0d0e008ULL),
   8238			BPF_STX_MEM(BPF_DW, R10, R1, -8),
   8239#ifdef __BIG_ENDIAN
   8240			BPF_STX_MEM(BPF_B, R10, R2, -1),
   8241#else
   8242			BPF_STX_MEM(BPF_B, R10, R2, -8),
   8243#endif
   8244			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
   8245			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
   8246			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   8247			BPF_EXIT_INSN(),
   8248		},
   8249		INTERNAL,
   8250		{ },
   8251		{ { 0, 0 } },
   8252		.stack_depth = 8,
   8253	},
   8254	{
   8255		"BPF_STX_MEM | BPF_B, MSB set",
   8256		.u.insns_int = {
   8257			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
   8258			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
   8259			BPF_LD_IMM64(R3, 0x8090a0b0c0d0e088ULL),
   8260			BPF_STX_MEM(BPF_DW, R10, R1, -8),
   8261#ifdef __BIG_ENDIAN
   8262			BPF_STX_MEM(BPF_B, R10, R2, -1),
   8263#else
   8264			BPF_STX_MEM(BPF_B, R10, R2, -8),
   8265#endif
   8266			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
   8267			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
   8268			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   8269			BPF_EXIT_INSN(),
   8270		},
   8271		INTERNAL,
   8272		{ },
   8273		{ { 0, 0 } },
   8274		.stack_depth = 8,
   8275	},
   8276	{
   8277		"BPF_STX_MEM | BPF_H",
   8278		.u.insns_int = {
   8279			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
   8280			BPF_LD_IMM64(R2, 0x0102030405060708ULL),
   8281			BPF_LD_IMM64(R3, 0x8090a0b0c0d00708ULL),
   8282			BPF_STX_MEM(BPF_DW, R10, R1, -8),
   8283#ifdef __BIG_ENDIAN
   8284			BPF_STX_MEM(BPF_H, R10, R2, -2),
   8285#else
   8286			BPF_STX_MEM(BPF_H, R10, R2, -8),
   8287#endif
   8288			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
   8289			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
   8290			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   8291			BPF_EXIT_INSN(),
   8292		},
   8293		INTERNAL,
   8294		{ },
   8295		{ { 0, 0 } },
   8296		.stack_depth = 8,
   8297	},
   8298	{
   8299		"BPF_STX_MEM | BPF_H, MSB set",
   8300		.u.insns_int = {
   8301			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
   8302			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
   8303			BPF_LD_IMM64(R3, 0x8090a0b0c0d08788ULL),
   8304			BPF_STX_MEM(BPF_DW, R10, R1, -8),
   8305#ifdef __BIG_ENDIAN
   8306			BPF_STX_MEM(BPF_H, R10, R2, -2),
   8307#else
   8308			BPF_STX_MEM(BPF_H, R10, R2, -8),
   8309#endif
   8310			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
   8311			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
   8312			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   8313			BPF_EXIT_INSN(),
   8314		},
   8315		INTERNAL,
   8316		{ },
   8317		{ { 0, 0 } },
   8318		.stack_depth = 8,
   8319	},
   8320	{
   8321		"BPF_STX_MEM | BPF_W",
   8322		.u.insns_int = {
   8323			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
   8324			BPF_LD_IMM64(R2, 0x0102030405060708ULL),
   8325			BPF_LD_IMM64(R3, 0x8090a0b005060708ULL),
   8326			BPF_STX_MEM(BPF_DW, R10, R1, -8),
   8327#ifdef __BIG_ENDIAN
   8328			BPF_STX_MEM(BPF_W, R10, R2, -4),
   8329#else
   8330			BPF_STX_MEM(BPF_W, R10, R2, -8),
   8331#endif
   8332			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
   8333			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
   8334			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   8335			BPF_EXIT_INSN(),
   8336		},
   8337		INTERNAL,
   8338		{ },
   8339		{ { 0, 0 } },
   8340		.stack_depth = 8,
   8341	},
   8342	{
   8343		"BPF_STX_MEM | BPF_W, MSB set",
   8344		.u.insns_int = {
   8345			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
   8346			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
   8347			BPF_LD_IMM64(R3, 0x8090a0b085868788ULL),
   8348			BPF_STX_MEM(BPF_DW, R10, R1, -8),
   8349#ifdef __BIG_ENDIAN
   8350			BPF_STX_MEM(BPF_W, R10, R2, -4),
   8351#else
   8352			BPF_STX_MEM(BPF_W, R10, R2, -8),
   8353#endif
   8354			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
   8355			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
   8356			BPF_ALU64_IMM(BPF_MOV, R0, 0),
   8357			BPF_EXIT_INSN(),
   8358		},
   8359		INTERNAL,
   8360		{ },
   8361		{ { 0, 0 } },
   8362		.stack_depth = 8,
   8363	},
   8364	/* BPF_ST(X) | BPF_MEM | BPF_B/H/W/DW */
   8365	{
   8366		"ST_MEM_B: Store/Load byte: max negative",
   8367		.u.insns_int = {
   8368			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   8369			BPF_ST_MEM(BPF_B, R10, -40, 0xff),
   8370			BPF_LDX_MEM(BPF_B, R0, R10, -40),
   8371			BPF_EXIT_INSN(),
   8372		},
   8373		INTERNAL,
   8374		{ },
   8375		{ { 0, 0xff } },
   8376		.stack_depth = 40,
   8377	},
   8378	{
   8379		"ST_MEM_B: Store/Load byte: max positive",
   8380		.u.insns_int = {
   8381			BPF_ALU32_IMM(BPF_MOV, R0, 1),
    8382			BPF_ST_MEM(BPF_B, R10, -40, 0x7f),
    8383			BPF_LDX_MEM(BPF_B, R0, R10, -40),
   8384			BPF_EXIT_INSN(),
   8385		},
   8386		INTERNAL,
   8387		{ },
   8388		{ { 0, 0x7f } },
   8389		.stack_depth = 40,
   8390	},
   8391	{
   8392		"STX_MEM_B: Store/Load byte: max negative",
   8393		.u.insns_int = {
   8394			BPF_LD_IMM64(R0, 0),
   8395			BPF_LD_IMM64(R1, 0xffLL),
   8396			BPF_STX_MEM(BPF_B, R10, R1, -40),
   8397			BPF_LDX_MEM(BPF_B, R0, R10, -40),
   8398			BPF_EXIT_INSN(),
   8399		},
   8400		INTERNAL,
   8401		{ },
   8402		{ { 0, 0xff } },
   8403		.stack_depth = 40,
   8404	},
   8405	{
   8406		"ST_MEM_H: Store/Load half word: max negative",
   8407		.u.insns_int = {
   8408			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   8409			BPF_ST_MEM(BPF_H, R10, -40, 0xffff),
   8410			BPF_LDX_MEM(BPF_H, R0, R10, -40),
   8411			BPF_EXIT_INSN(),
   8412		},
   8413		INTERNAL,
   8414		{ },
   8415		{ { 0, 0xffff } },
   8416		.stack_depth = 40,
   8417	},
   8418	{
   8419		"ST_MEM_H: Store/Load half word: max positive",
   8420		.u.insns_int = {
   8421			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   8422			BPF_ST_MEM(BPF_H, R10, -40, 0x7fff),
   8423			BPF_LDX_MEM(BPF_H, R0, R10, -40),
   8424			BPF_EXIT_INSN(),
   8425		},
   8426		INTERNAL,
   8427		{ },
   8428		{ { 0, 0x7fff } },
   8429		.stack_depth = 40,
   8430	},
   8431	{
   8432		"STX_MEM_H: Store/Load half word: max negative",
   8433		.u.insns_int = {
   8434			BPF_LD_IMM64(R0, 0),
   8435			BPF_LD_IMM64(R1, 0xffffLL),
   8436			BPF_STX_MEM(BPF_H, R10, R1, -40),
   8437			BPF_LDX_MEM(BPF_H, R0, R10, -40),
   8438			BPF_EXIT_INSN(),
   8439		},
   8440		INTERNAL,
   8441		{ },
   8442		{ { 0, 0xffff } },
   8443		.stack_depth = 40,
   8444	},
   8445	{
   8446		"ST_MEM_W: Store/Load word: max negative",
   8447		.u.insns_int = {
   8448			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   8449			BPF_ST_MEM(BPF_W, R10, -40, 0xffffffff),
   8450			BPF_LDX_MEM(BPF_W, R0, R10, -40),
   8451			BPF_EXIT_INSN(),
   8452		},
   8453		INTERNAL,
   8454		{ },
   8455		{ { 0, 0xffffffff } },
   8456		.stack_depth = 40,
   8457	},
   8458	{
   8459		"ST_MEM_W: Store/Load word: max positive",
   8460		.u.insns_int = {
   8461			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   8462			BPF_ST_MEM(BPF_W, R10, -40, 0x7fffffff),
   8463			BPF_LDX_MEM(BPF_W, R0, R10, -40),
   8464			BPF_EXIT_INSN(),
   8465		},
   8466		INTERNAL,
   8467		{ },
   8468		{ { 0, 0x7fffffff } },
   8469		.stack_depth = 40,
   8470	},
   8471	{
   8472		"STX_MEM_W: Store/Load word: max negative",
   8473		.u.insns_int = {
   8474			BPF_LD_IMM64(R0, 0),
   8475			BPF_LD_IMM64(R1, 0xffffffffLL),
   8476			BPF_STX_MEM(BPF_W, R10, R1, -40),
   8477			BPF_LDX_MEM(BPF_W, R0, R10, -40),
   8478			BPF_EXIT_INSN(),
   8479		},
   8480		INTERNAL,
   8481		{ },
   8482		{ { 0, 0xffffffff } },
   8483		.stack_depth = 40,
   8484	},
   8485	{
   8486		"ST_MEM_DW: Store/Load double word: max negative",
   8487		.u.insns_int = {
   8488			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   8489			BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
   8490			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
   8491			BPF_EXIT_INSN(),
   8492		},
   8493		INTERNAL,
   8494		{ },
   8495		{ { 0, 0xffffffff } },
   8496		.stack_depth = 40,
   8497	},
   8498	{
   8499		"ST_MEM_DW: Store/Load double word: max negative 2",
   8500		.u.insns_int = {
   8501			BPF_LD_IMM64(R2, 0xffff00000000ffffLL),
   8502			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
   8503			BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
   8504			BPF_LDX_MEM(BPF_DW, R2, R10, -40),
   8505			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
   8506			BPF_MOV32_IMM(R0, 2),
   8507			BPF_EXIT_INSN(),
   8508			BPF_MOV32_IMM(R0, 1),
   8509			BPF_EXIT_INSN(),
   8510		},
   8511		INTERNAL,
   8512		{ },
   8513		{ { 0, 0x1 } },
   8514		.stack_depth = 40,
   8515	},
   8516	{
   8517		"ST_MEM_DW: Store/Load double word: max positive",
   8518		.u.insns_int = {
   8519			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   8520			BPF_ST_MEM(BPF_DW, R10, -40, 0x7fffffff),
   8521			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
   8522			BPF_EXIT_INSN(),
   8523		},
   8524		INTERNAL,
   8525		{ },
   8526		{ { 0, 0x7fffffff } },
   8527		.stack_depth = 40,
   8528	},
   8529	{
   8530		"STX_MEM_DW: Store/Load double word: max negative",
   8531		.u.insns_int = {
   8532			BPF_LD_IMM64(R0, 0),
   8533			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
   8534			BPF_STX_MEM(BPF_DW, R10, R1, -40),
   8535			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
   8536			BPF_EXIT_INSN(),
   8537		},
   8538		INTERNAL,
   8539		{ },
   8540		{ { 0, 0xffffffff } },
   8541		.stack_depth = 40,
   8542	},
   8543	{
   8544		"STX_MEM_DW: Store double word: first word in memory",
   8545		.u.insns_int = {
   8546			BPF_LD_IMM64(R0, 0),
   8547			BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
   8548			BPF_STX_MEM(BPF_DW, R10, R1, -40),
   8549			BPF_LDX_MEM(BPF_W, R0, R10, -40),
   8550			BPF_EXIT_INSN(),
   8551		},
   8552		INTERNAL,
   8553		{ },
   8554#ifdef __BIG_ENDIAN
   8555		{ { 0, 0x01234567 } },
   8556#else
   8557		{ { 0, 0x89abcdef } },
   8558#endif
   8559		.stack_depth = 40,
   8560	},
   8561	{
   8562		"STX_MEM_DW: Store double word: second word in memory",
   8563		.u.insns_int = {
   8564			BPF_LD_IMM64(R0, 0),
   8565			BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
   8566			BPF_STX_MEM(BPF_DW, R10, R1, -40),
   8567			BPF_LDX_MEM(BPF_W, R0, R10, -36),
   8568			BPF_EXIT_INSN(),
   8569		},
   8570		INTERNAL,
   8571		{ },
   8572#ifdef __BIG_ENDIAN
   8573		{ { 0, 0x89abcdef } },
   8574#else
   8575		{ { 0, 0x01234567 } },
   8576#endif
   8577		.stack_depth = 40,
   8578	},
   8579	/* BPF_STX | BPF_ATOMIC | BPF_W/DW */
   8580	{
   8581		"STX_XADD_W: X + 1 + 1 + 1 + ...",
   8582		{ },
   8583		INTERNAL,
   8584		{ },
   8585		{ { 0, 4134 } },
   8586		.fill_helper = bpf_fill_stxw,
   8587	},
   8588	{
   8589		"STX_XADD_DW: X + 1 + 1 + 1 + ...",
   8590		{ },
   8591		INTERNAL,
   8592		{ },
   8593		{ { 0, 4134 } },
   8594		.fill_helper = bpf_fill_stxdw,
   8595	},
   8596	/*
   8597	 * Exhaustive tests of atomic operation variants.
   8598	 * Individual tests are expanded from template macros for all
   8599	 * combinations of ALU operation, word size and fetching.
   8600	 */
   8601#define BPF_ATOMIC_POISON(width) ((width) == BPF_W ? (0xbaadf00dULL << 32) : 0)
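        /*
         * For BPF_W operations the upper 32 bits of the 64-bit source
         * operand are filled with a poison pattern, as a canary against
         * implementations that wrongly operate on the full 64 bits.
         */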
   8602
   8603#define BPF_ATOMIC_OP_TEST1(width, op, logic, old, update, result)	\
   8604{									\
   8605	"BPF_ATOMIC | " #width ", " #op ": Test: "			\
   8606		#old " " #logic " " #update " = " #result,		\
   8607	.u.insns_int = {						\
   8608		BPF_LD_IMM64(R5, (update) | BPF_ATOMIC_POISON(width)),	\
   8609		BPF_ST_MEM(width, R10, -40, old),			\
   8610		BPF_ATOMIC_OP(width, op, R10, R5, -40),			\
   8611		BPF_LDX_MEM(width, R0, R10, -40),			\
   8612		BPF_ALU64_REG(BPF_MOV, R1, R0),				\
   8613		BPF_ALU64_IMM(BPF_RSH, R1, 32),				\
   8614		BPF_ALU64_REG(BPF_OR, R0, R1),				\
   8615		BPF_EXIT_INSN(),					\
   8616	},								\
   8617	INTERNAL,							\
   8618	{ },								\
   8619	{ { 0, result } },						\
   8620	.stack_depth = 40,						\
   8621}
   8622#define BPF_ATOMIC_OP_TEST2(width, op, logic, old, update, result)	\
   8623{									\
   8624	"BPF_ATOMIC | " #width ", " #op ": Test side effects, r10: "	\
   8625		#old " " #logic " " #update " = " #result,		\
   8626	.u.insns_int = {						\
   8627		BPF_ALU64_REG(BPF_MOV, R1, R10),			\
   8628		BPF_LD_IMM64(R0, (update) | BPF_ATOMIC_POISON(width)),	\
   8629		BPF_ST_MEM(BPF_W, R10, -40, old),			\
   8630		BPF_ATOMIC_OP(width, op, R10, R0, -40),			\
   8631		BPF_ALU64_REG(BPF_MOV, R0, R10),			\
   8632		BPF_ALU64_REG(BPF_SUB, R0, R1),				\
   8633		BPF_ALU64_REG(BPF_MOV, R1, R0),				\
   8634		BPF_ALU64_IMM(BPF_RSH, R1, 32),				\
   8635		BPF_ALU64_REG(BPF_OR, R0, R1),				\
   8636		BPF_EXIT_INSN(),					\
   8637	},								\
   8638	INTERNAL,							\
   8639	{ },								\
   8640	{ { 0, 0 } },							\
   8641	.stack_depth = 40,						\
   8642}
   8643#define BPF_ATOMIC_OP_TEST3(width, op, logic, old, update, result)	\
   8644{									\
   8645	"BPF_ATOMIC | " #width ", " #op ": Test side effects, r0: "	\
   8646		#old " " #logic " " #update " = " #result,		\
   8647	.u.insns_int = {						\
   8648		BPF_ALU64_REG(BPF_MOV, R0, R10),			\
   8649		BPF_LD_IMM64(R1, (update) | BPF_ATOMIC_POISON(width)),	\
   8650		BPF_ST_MEM(width, R10, -40, old),			\
   8651		BPF_ATOMIC_OP(width, op, R10, R1, -40),			\
   8652		BPF_ALU64_REG(BPF_SUB, R0, R10),			\
   8653		BPF_ALU64_REG(BPF_MOV, R1, R0),				\
   8654		BPF_ALU64_IMM(BPF_RSH, R1, 32),				\
   8655		BPF_ALU64_REG(BPF_OR, R0, R1),				\
   8656		BPF_EXIT_INSN(),					\
   8657	},								\
   8658	INTERNAL,                                                       \
   8659	{ },                                                            \
   8660	{ { 0, 0 } },                                                   \
   8661	.stack_depth = 40,                                              \
   8662}
   8663#define BPF_ATOMIC_OP_TEST4(width, op, logic, old, update, result)	\
   8664{									\
   8665	"BPF_ATOMIC | " #width ", " #op ": Test fetch: "		\
   8666		#old " " #logic " " #update " = " #result,		\
   8667	.u.insns_int = {						\
   8668		BPF_LD_IMM64(R3, (update) | BPF_ATOMIC_POISON(width)),	\
   8669		BPF_ST_MEM(width, R10, -40, old),			\
   8670		BPF_ATOMIC_OP(width, op, R10, R3, -40),			\
   8671		BPF_ALU32_REG(BPF_MOV, R0, R3),                         \
   8672		BPF_EXIT_INSN(),					\
   8673	},								\
   8674	INTERNAL,                                                       \
   8675	{ },                                                            \
   8676	{ { 0, (op) & BPF_FETCH ? old : update } },			\
   8677	.stack_depth = 40,                                              \
   8678}
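        	/*
        	 * TEST1 checks the value left in memory (the upper half of the
        	 * loaded result is folded into the lower half so stray high bits
        	 * are caught), TEST2 and TEST3 check that registers not involved
        	 * in the operation keep their value (a saved copy of R10 must
        	 * still equal R10), and TEST4 checks fetch semantics: with
        	 * BPF_FETCH the source register receives the old memory value,
        	 * otherwise it is left unchanged.
        	 */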
   8679	/* BPF_ATOMIC | BPF_W: BPF_ADD */
   8680	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
   8681	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
   8682	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
   8683	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
   8684	/* BPF_ATOMIC | BPF_W: BPF_ADD | BPF_FETCH */
   8685	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
   8686	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
   8687	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
   8688	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
   8689	/* BPF_ATOMIC | BPF_DW: BPF_ADD */
   8690	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
   8691	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
   8692	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
   8693	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
   8694	/* BPF_ATOMIC | BPF_DW: BPF_ADD | BPF_FETCH */
   8695	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
   8696	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
   8697	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
   8698	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
   8699	/* BPF_ATOMIC | BPF_W: BPF_AND */
   8700	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
   8701	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
   8702	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
   8703	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
   8704	/* BPF_ATOMIC | BPF_W: BPF_AND | BPF_FETCH */
   8705	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
   8706	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
   8707	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
   8708	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
   8709	/* BPF_ATOMIC | BPF_DW: BPF_AND */
   8710	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
   8711	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
   8712	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
   8713	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
   8714	/* BPF_ATOMIC | BPF_DW: BPF_AND | BPF_FETCH */
   8715	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
   8716	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
   8717	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
   8718	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
   8719	/* BPF_ATOMIC | BPF_W: BPF_OR */
   8720	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
   8721	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
   8722	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
   8723	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
   8724	/* BPF_ATOMIC | BPF_W: BPF_OR | BPF_FETCH */
   8725	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
   8726	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
   8727	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
   8728	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
   8729	/* BPF_ATOMIC | BPF_DW: BPF_OR */
   8730	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
   8731	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
   8732	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
   8733	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
   8734	/* BPF_ATOMIC | BPF_DW: BPF_OR | BPF_FETCH */
   8735	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
   8736	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
   8737	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
   8738	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
   8739	/* BPF_ATOMIC | BPF_W: BPF_XOR */
   8740	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
   8741	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
   8742	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
   8743	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
   8744	/* BPF_ATOMIC | BPF_W: BPF_XOR | BPF_FETCH */
   8745	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
   8746	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
   8747	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
   8748	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
   8749	/* BPF_ATOMIC | BPF_DW: BPF_XOR */
   8750	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
   8751	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
   8752	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
   8753	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
   8754	/* BPF_ATOMIC | BPF_DW: BPF_XOR | BPF_FETCH */
   8755	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
   8756	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
   8757	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
   8758	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
   8759	/* BPF_ATOMIC | BPF_W: BPF_XCHG */
   8760	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
   8761	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
   8762	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
   8763	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
   8764	/* BPF_ATOMIC | BPF_DW: BPF_XCHG */
   8765	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
   8766	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
   8767	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
   8768	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
   8769#undef BPF_ATOMIC_POISON
   8770#undef BPF_ATOMIC_OP_TEST1
   8771#undef BPF_ATOMIC_OP_TEST2
   8772#undef BPF_ATOMIC_OP_TEST3
   8773#undef BPF_ATOMIC_OP_TEST4
   8774	/* BPF_ATOMIC | BPF_W, BPF_CMPXCHG */
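        	/*
        	 * BPF_CMPXCHG compares the value at dst + off with R0; on a
        	 * match the src register is stored there, and R0 always receives
        	 * the value that was in memory before the operation.
        	 */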
   8775	{
   8776		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful return",
   8777		.u.insns_int = {
   8778			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
   8779			BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
   8780			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
   8781			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
   8782			BPF_EXIT_INSN(),
   8783		},
   8784		INTERNAL,
   8785		{ },
   8786		{ { 0, 0x01234567 } },
   8787		.stack_depth = 40,
   8788	},
   8789	{
   8790		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful store",
   8791		.u.insns_int = {
   8792			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
   8793			BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
   8794			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
   8795			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
   8796			BPF_LDX_MEM(BPF_W, R0, R10, -40),
   8797			BPF_EXIT_INSN(),
   8798		},
   8799		INTERNAL,
   8800		{ },
   8801		{ { 0, 0x89abcdef } },
   8802		.stack_depth = 40,
   8803	},
   8804	{
   8805		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure return",
   8806		.u.insns_int = {
   8807			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
   8808			BPF_ALU32_IMM(BPF_MOV, R0, 0x76543210),
   8809			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
   8810			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
   8811			BPF_EXIT_INSN(),
   8812		},
   8813		INTERNAL,
   8814		{ },
   8815		{ { 0, 0x01234567 } },
   8816		.stack_depth = 40,
   8817	},
   8818	{
   8819		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure store",
   8820		.u.insns_int = {
   8821			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
   8822			BPF_ALU32_IMM(BPF_MOV, R0, 0x76543210),
   8823			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
   8824			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
   8825			BPF_LDX_MEM(BPF_W, R0, R10, -40),
   8826			BPF_EXIT_INSN(),
   8827		},
   8828		INTERNAL,
   8829		{ },
   8830		{ { 0, 0x01234567 } },
   8831		.stack_depth = 40,
   8832	},
   8833	{
   8834		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test side effects",
   8835		.u.insns_int = {
   8836			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
   8837			BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
   8838			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
   8839			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
   8840			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
   8841			BPF_ALU32_REG(BPF_MOV, R0, R3),
   8842			BPF_EXIT_INSN(),
   8843		},
   8844		INTERNAL,
   8845		{ },
   8846		{ { 0, 0x89abcdef } },
   8847		.stack_depth = 40,
   8848	},
   8849	/* BPF_ATOMIC | BPF_DW, BPF_CMPXCHG */
   8850	{
   8851		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful return",
   8852		.u.insns_int = {
   8853			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
   8854			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
   8855			BPF_ALU64_REG(BPF_MOV, R0, R1),
   8856			BPF_STX_MEM(BPF_DW, R10, R1, -40),
   8857			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
   8858			BPF_JMP_REG(BPF_JNE, R0, R1, 1),
   8859			BPF_ALU64_REG(BPF_SUB, R0, R1),
   8860			BPF_EXIT_INSN(),
   8861		},
   8862		INTERNAL,
   8863		{ },
   8864		{ { 0, 0 } },
   8865		.stack_depth = 40,
   8866	},
   8867	{
   8868		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful store",
   8869		.u.insns_int = {
   8870			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
   8871			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
   8872			BPF_ALU64_REG(BPF_MOV, R0, R1),
   8873			BPF_STX_MEM(BPF_DW, R10, R0, -40),
   8874			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
   8875			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
   8876			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
   8877			BPF_ALU64_REG(BPF_SUB, R0, R2),
   8878			BPF_EXIT_INSN(),
   8879		},
   8880		INTERNAL,
   8881		{ },
   8882		{ { 0, 0 } },
   8883		.stack_depth = 40,
   8884	},
   8885	{
   8886		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure return",
   8887		.u.insns_int = {
   8888			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
   8889			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
   8890			BPF_ALU64_REG(BPF_MOV, R0, R1),
   8891			BPF_ALU64_IMM(BPF_ADD, R0, 1),
   8892			BPF_STX_MEM(BPF_DW, R10, R1, -40),
   8893			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
   8894			BPF_JMP_REG(BPF_JNE, R0, R1, 1),
   8895			BPF_ALU64_REG(BPF_SUB, R0, R1),
   8896			BPF_EXIT_INSN(),
   8897		},
   8898		INTERNAL,
   8899		{ },
   8900		{ { 0, 0 } },
   8901		.stack_depth = 40,
   8902	},
   8903	{
   8904		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure store",
   8905		.u.insns_int = {
   8906			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
   8907			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
   8908			BPF_ALU64_REG(BPF_MOV, R0, R1),
   8909			BPF_ALU64_IMM(BPF_ADD, R0, 1),
   8910			BPF_STX_MEM(BPF_DW, R10, R1, -40),
   8911			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
   8912			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
   8913			BPF_JMP_REG(BPF_JNE, R0, R1, 1),
   8914			BPF_ALU64_REG(BPF_SUB, R0, R1),
   8915			BPF_EXIT_INSN(),
   8916		},
   8917		INTERNAL,
   8918		{ },
   8919		{ { 0, 0 } },
   8920		.stack_depth = 40,
   8921	},
   8922	{
   8923		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test side effects",
   8924		.u.insns_int = {
   8925			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
   8926			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
   8927			BPF_ALU64_REG(BPF_MOV, R0, R1),
   8928			BPF_STX_MEM(BPF_DW, R10, R1, -40),
   8929			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
   8930			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
   8931			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
   8932			BPF_ALU64_REG(BPF_SUB, R0, R2),
   8933			BPF_EXIT_INSN(),
   8934		},
   8935		INTERNAL,
   8936		{ },
   8937		{ { 0, 0 } },
   8938		.stack_depth = 40,
   8939	},
   8940	/* BPF_JMP32 | BPF_JEQ | BPF_K */
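        	/*
        	 * Most JMP32 cases follow the same pattern: the first branch
        	 * must not be taken and the second must be, so that the final
        	 * MOV zeroing R0 is skipped and the initial value is returned.
        	 */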
   8941	{
   8942		"JMP32_JEQ_K: Small immediate",
   8943		.u.insns_int = {
   8944			BPF_ALU32_IMM(BPF_MOV, R0, 123),
   8945			BPF_JMP32_IMM(BPF_JEQ, R0, 321, 1),
   8946			BPF_JMP32_IMM(BPF_JEQ, R0, 123, 1),
   8947			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   8948			BPF_EXIT_INSN(),
   8949		},
   8950		INTERNAL,
   8951		{ },
   8952		{ { 0, 123 } }
   8953	},
   8954	{
   8955		"JMP32_JEQ_K: Large immediate",
   8956		.u.insns_int = {
   8957			BPF_ALU32_IMM(BPF_MOV, R0, 12345678),
   8958			BPF_JMP32_IMM(BPF_JEQ, R0, 12345678 & 0xffff, 1),
   8959			BPF_JMP32_IMM(BPF_JEQ, R0, 12345678, 1),
   8960			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   8961			BPF_EXIT_INSN(),
   8962		},
   8963		INTERNAL,
   8964		{ },
   8965		{ { 0, 12345678 } }
   8966	},
   8967	{
   8968		"JMP32_JEQ_K: negative immediate",
   8969		.u.insns_int = {
   8970			BPF_ALU32_IMM(BPF_MOV, R0, -123),
   8971			BPF_JMP32_IMM(BPF_JEQ, R0,  123, 1),
   8972			BPF_JMP32_IMM(BPF_JEQ, R0, -123, 1),
   8973			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   8974			BPF_EXIT_INSN(),
   8975		},
   8976		INTERNAL,
   8977		{ },
   8978		{ { 0, -123 } }
   8979	},
   8980	/* BPF_JMP32 | BPF_JEQ | BPF_X */
   8981	{
   8982		"JMP32_JEQ_X",
   8983		.u.insns_int = {
   8984			BPF_ALU32_IMM(BPF_MOV, R0, 1234),
   8985			BPF_ALU32_IMM(BPF_MOV, R1, 4321),
   8986			BPF_JMP32_REG(BPF_JEQ, R0, R1, 2),
   8987			BPF_ALU32_IMM(BPF_MOV, R1, 1234),
   8988			BPF_JMP32_REG(BPF_JEQ, R0, R1, 1),
   8989			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   8990			BPF_EXIT_INSN(),
   8991		},
   8992		INTERNAL,
   8993		{ },
   8994		{ { 0, 1234 } }
   8995	},
   8996	/* BPF_JMP32 | BPF_JNE | BPF_K */
   8997	{
   8998		"JMP32_JNE_K: Small immediate",
   8999		.u.insns_int = {
   9000			BPF_ALU32_IMM(BPF_MOV, R0, 123),
   9001			BPF_JMP32_IMM(BPF_JNE, R0, 123, 1),
   9002			BPF_JMP32_IMM(BPF_JNE, R0, 321, 1),
   9003			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9004			BPF_EXIT_INSN(),
   9005		},
   9006		INTERNAL,
   9007		{ },
   9008		{ { 0, 123 } }
   9009	},
   9010	{
   9011		"JMP32_JNE_K: Large immediate",
   9012		.u.insns_int = {
   9013			BPF_ALU32_IMM(BPF_MOV, R0, 12345678),
   9014			BPF_JMP32_IMM(BPF_JNE, R0, 12345678, 1),
   9015			BPF_JMP32_IMM(BPF_JNE, R0, 12345678 & 0xffff, 1),
   9016			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9017			BPF_EXIT_INSN(),
   9018		},
   9019		INTERNAL,
   9020		{ },
   9021		{ { 0, 12345678 } }
   9022	},
   9023	{
   9024		"JMP32_JNE_K: negative immediate",
   9025		.u.insns_int = {
   9026			BPF_ALU32_IMM(BPF_MOV, R0, -123),
   9027			BPF_JMP32_IMM(BPF_JNE, R0, -123, 1),
   9028			BPF_JMP32_IMM(BPF_JNE, R0,  123, 1),
   9029			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9030			BPF_EXIT_INSN(),
   9031		},
   9032		INTERNAL,
   9033		{ },
   9034		{ { 0, -123 } }
   9035	},
   9036	/* BPF_JMP32 | BPF_JNE | BPF_X */
   9037	{
   9038		"JMP32_JNE_X",
   9039		.u.insns_int = {
   9040			BPF_ALU32_IMM(BPF_MOV, R0, 1234),
   9041			BPF_ALU32_IMM(BPF_MOV, R1, 1234),
   9042			BPF_JMP32_REG(BPF_JNE, R0, R1, 2),
   9043			BPF_ALU32_IMM(BPF_MOV, R1, 4321),
   9044			BPF_JMP32_REG(BPF_JNE, R0, R1, 1),
   9045			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9046			BPF_EXIT_INSN(),
   9047		},
   9048		INTERNAL,
   9049		{ },
   9050		{ { 0, 1234 } }
   9051	},
   9052	/* BPF_JMP32 | BPF_JSET | BPF_K */
   9053	{
   9054		"JMP32_JSET_K: Small immediate",
   9055		.u.insns_int = {
   9056			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9057			BPF_JMP32_IMM(BPF_JSET, R0, 2, 1),
   9058			BPF_JMP32_IMM(BPF_JSET, R0, 3, 1),
   9059			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9060			BPF_EXIT_INSN(),
   9061		},
   9062		INTERNAL,
   9063		{ },
   9064		{ { 0, 1 } }
   9065	},
   9066	{
   9067		"JMP32_JSET_K: Large immediate",
   9068		.u.insns_int = {
   9069			BPF_ALU32_IMM(BPF_MOV, R0, 0x40000000),
   9070			BPF_JMP32_IMM(BPF_JSET, R0, 0x3fffffff, 1),
   9071			BPF_JMP32_IMM(BPF_JSET, R0, 0x60000000, 1),
   9072			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9073			BPF_EXIT_INSN(),
   9074		},
   9075		INTERNAL,
   9076		{ },
   9077		{ { 0, 0x40000000 } }
   9078	},
   9079	{
   9080		"JMP32_JSET_K: negative immediate",
   9081		.u.insns_int = {
   9082			BPF_ALU32_IMM(BPF_MOV, R0, -123),
   9083			BPF_JMP32_IMM(BPF_JSET, R0, -1, 1),
   9084			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9085			BPF_EXIT_INSN(),
   9086		},
   9087		INTERNAL,
   9088		{ },
   9089		{ { 0, -123 } }
   9090	},
   9091	/* BPF_JMP32 | BPF_JSET | BPF_X */
   9092	{
   9093		"JMP32_JSET_X",
   9094		.u.insns_int = {
   9095			BPF_ALU32_IMM(BPF_MOV, R0, 8),
   9096			BPF_ALU32_IMM(BPF_MOV, R1, 7),
   9097			BPF_JMP32_REG(BPF_JSET, R0, R1, 2),
   9098			BPF_ALU32_IMM(BPF_MOV, R1, 8 | 2),
   9099			BPF_JMP32_REG(BPF_JNE, R0, R1, 1),
   9100			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9101			BPF_EXIT_INSN(),
   9102		},
   9103		INTERNAL,
   9104		{ },
   9105		{ { 0, 8 } }
   9106	},
   9107	/* BPF_JMP32 | BPF_JGT | BPF_K */
   9108	{
   9109		"JMP32_JGT_K: Small immediate",
   9110		.u.insns_int = {
   9111			BPF_ALU32_IMM(BPF_MOV, R0, 123),
   9112			BPF_JMP32_IMM(BPF_JGT, R0, 123, 1),
   9113			BPF_JMP32_IMM(BPF_JGT, R0, 122, 1),
   9114			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9115			BPF_EXIT_INSN(),
   9116		},
   9117		INTERNAL,
   9118		{ },
   9119		{ { 0, 123 } }
   9120	},
   9121	{
   9122		"JMP32_JGT_K: Large immediate",
   9123		.u.insns_int = {
   9124			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
   9125			BPF_JMP32_IMM(BPF_JGT, R0, 0xffffffff, 1),
   9126			BPF_JMP32_IMM(BPF_JGT, R0, 0xfffffffd, 1),
   9127			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9128			BPF_EXIT_INSN(),
   9129		},
   9130		INTERNAL,
   9131		{ },
   9132		{ { 0, 0xfffffffe } }
   9133	},
   9134	/* BPF_JMP32 | BPF_JGT | BPF_X */
   9135	{
   9136		"JMP32_JGT_X",
   9137		.u.insns_int = {
   9138			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
   9139			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
   9140			BPF_JMP32_REG(BPF_JGT, R0, R1, 2),
   9141			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
   9142			BPF_JMP32_REG(BPF_JGT, R0, R1, 1),
   9143			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9144			BPF_EXIT_INSN(),
   9145		},
   9146		INTERNAL,
   9147		{ },
   9148		{ { 0, 0xfffffffe } }
   9149	},
   9150	/* BPF_JMP32 | BPF_JGE | BPF_K */
   9151	{
   9152		"JMP32_JGE_K: Small immediate",
   9153		.u.insns_int = {
   9154			BPF_ALU32_IMM(BPF_MOV, R0, 123),
   9155			BPF_JMP32_IMM(BPF_JGE, R0, 124, 1),
   9156			BPF_JMP32_IMM(BPF_JGE, R0, 123, 1),
   9157			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9158			BPF_EXIT_INSN(),
   9159		},
   9160		INTERNAL,
   9161		{ },
   9162		{ { 0, 123 } }
   9163	},
   9164	{
   9165		"JMP32_JGE_K: Large immediate",
   9166		.u.insns_int = {
   9167			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
   9168			BPF_JMP32_IMM(BPF_JGE, R0, 0xffffffff, 1),
   9169			BPF_JMP32_IMM(BPF_JGE, R0, 0xfffffffe, 1),
   9170			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9171			BPF_EXIT_INSN(),
   9172		},
   9173		INTERNAL,
   9174		{ },
   9175		{ { 0, 0xfffffffe } }
   9176	},
   9177	/* BPF_JMP32 | BPF_JGE | BPF_X */
   9178	{
   9179		"JMP32_JGE_X",
   9180		.u.insns_int = {
   9181			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
   9182			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
   9183			BPF_JMP32_REG(BPF_JGE, R0, R1, 2),
   9184			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
   9185			BPF_JMP32_REG(BPF_JGE, R0, R1, 1),
   9186			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9187			BPF_EXIT_INSN(),
   9188		},
   9189		INTERNAL,
   9190		{ },
   9191		{ { 0, 0xfffffffe } }
   9192	},
   9193	/* BPF_JMP32 | BPF_JLT | BPF_K */
   9194	{
   9195		"JMP32_JLT_K: Small immediate",
   9196		.u.insns_int = {
   9197			BPF_ALU32_IMM(BPF_MOV, R0, 123),
   9198			BPF_JMP32_IMM(BPF_JLT, R0, 123, 1),
   9199			BPF_JMP32_IMM(BPF_JLT, R0, 124, 1),
   9200			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9201			BPF_EXIT_INSN(),
   9202		},
   9203		INTERNAL,
   9204		{ },
   9205		{ { 0, 123 } }
   9206	},
   9207	{
   9208		"JMP32_JLT_K: Large immediate",
   9209		.u.insns_int = {
   9210			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
   9211			BPF_JMP32_IMM(BPF_JLT, R0, 0xfffffffd, 1),
   9212			BPF_JMP32_IMM(BPF_JLT, R0, 0xffffffff, 1),
   9213			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9214			BPF_EXIT_INSN(),
   9215		},
   9216		INTERNAL,
   9217		{ },
   9218		{ { 0, 0xfffffffe } }
   9219	},
   9220	/* BPF_JMP32 | BPF_JLT | BPF_X */
   9221	{
   9222		"JMP32_JLT_X",
   9223		.u.insns_int = {
   9224			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
   9225			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
   9226			BPF_JMP32_REG(BPF_JLT, R0, R1, 2),
   9227			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
   9228			BPF_JMP32_REG(BPF_JLT, R0, R1, 1),
   9229			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9230			BPF_EXIT_INSN(),
   9231		},
   9232		INTERNAL,
   9233		{ },
   9234		{ { 0, 0xfffffffe } }
   9235	},
   9236	/* BPF_JMP32 | BPF_JLE | BPF_K */
   9237	{
   9238		"JMP32_JLE_K: Small immediate",
   9239		.u.insns_int = {
   9240			BPF_ALU32_IMM(BPF_MOV, R0, 123),
   9241			BPF_JMP32_IMM(BPF_JLE, R0, 122, 1),
   9242			BPF_JMP32_IMM(BPF_JLE, R0, 123, 1),
   9243			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9244			BPF_EXIT_INSN(),
   9245		},
   9246		INTERNAL,
   9247		{ },
   9248		{ { 0, 123 } }
   9249	},
   9250	{
   9251		"JMP32_JLE_K: Large immediate",
   9252		.u.insns_int = {
   9253			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
   9254			BPF_JMP32_IMM(BPF_JLE, R0, 0xfffffffd, 1),
   9255			BPF_JMP32_IMM(BPF_JLE, R0, 0xfffffffe, 1),
   9256			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9257			BPF_EXIT_INSN(),
   9258		},
   9259		INTERNAL,
   9260		{ },
   9261		{ { 0, 0xfffffffe } }
   9262	},
   9263	/* BPF_JMP32 | BPF_JLE | BPF_X */
   9264	{
   9265		"JMP32_JLE_X",
   9266		.u.insns_int = {
   9267			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
   9268			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
   9269			BPF_JMP32_REG(BPF_JLE, R0, R1, 2),
   9270			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
   9271			BPF_JMP32_REG(BPF_JLE, R0, R1, 1),
   9272			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9273			BPF_EXIT_INSN(),
   9274		},
   9275		INTERNAL,
   9276		{ },
   9277		{ { 0, 0xfffffffe } }
   9278	},
   9279	/* BPF_JMP32 | BPF_JSGT | BPF_K */
   9280	{
   9281		"JMP32_JSGT_K: Small immediate",
   9282		.u.insns_int = {
   9283			BPF_ALU32_IMM(BPF_MOV, R0, -123),
   9284			BPF_JMP32_IMM(BPF_JSGT, R0, -123, 1),
   9285			BPF_JMP32_IMM(BPF_JSGT, R0, -124, 1),
   9286			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9287			BPF_EXIT_INSN(),
   9288		},
   9289		INTERNAL,
   9290		{ },
   9291		{ { 0, -123 } }
   9292	},
   9293	{
   9294		"JMP32_JSGT_K: Large immediate",
   9295		.u.insns_int = {
   9296			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
   9297			BPF_JMP32_IMM(BPF_JSGT, R0, -12345678, 1),
   9298			BPF_JMP32_IMM(BPF_JSGT, R0, -12345679, 1),
   9299			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9300			BPF_EXIT_INSN(),
   9301		},
   9302		INTERNAL,
   9303		{ },
   9304		{ { 0, -12345678 } }
   9305	},
   9306	/* BPF_JMP32 | BPF_JSGT | BPF_X */
   9307	{
   9308		"JMP32_JSGT_X",
   9309		.u.insns_int = {
   9310			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
   9311			BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
   9312			BPF_JMP32_REG(BPF_JSGT, R0, R1, 2),
   9313			BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
   9314			BPF_JMP32_REG(BPF_JSGT, R0, R1, 1),
   9315			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9316			BPF_EXIT_INSN(),
   9317		},
   9318		INTERNAL,
   9319		{ },
   9320		{ { 0, -12345678 } }
   9321	},
   9322	/* BPF_JMP32 | BPF_JSGE | BPF_K */
   9323	{
   9324		"JMP32_JSGE_K: Small immediate",
   9325		.u.insns_int = {
   9326			BPF_ALU32_IMM(BPF_MOV, R0, -123),
   9327			BPF_JMP32_IMM(BPF_JSGE, R0, -122, 1),
   9328			BPF_JMP32_IMM(BPF_JSGE, R0, -123, 1),
   9329			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9330			BPF_EXIT_INSN(),
   9331		},
   9332		INTERNAL,
   9333		{ },
   9334		{ { 0, -123 } }
   9335	},
   9336	{
   9337		"JMP32_JSGE_K: Large immediate",
   9338		.u.insns_int = {
   9339			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
   9340			BPF_JMP32_IMM(BPF_JSGE, R0, -12345677, 1),
   9341			BPF_JMP32_IMM(BPF_JSGE, R0, -12345678, 1),
   9342			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9343			BPF_EXIT_INSN(),
   9344		},
   9345		INTERNAL,
   9346		{ },
   9347		{ { 0, -12345678 } }
   9348	},
   9349	/* BPF_JMP32 | BPF_JSGE | BPF_X */
   9350	{
   9351		"JMP32_JSGE_X",
   9352		.u.insns_int = {
   9353			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
   9354			BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
   9355			BPF_JMP32_REG(BPF_JSGE, R0, R1, 2),
   9356			BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
   9357			BPF_JMP32_REG(BPF_JSGE, R0, R1, 1),
   9358			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9359			BPF_EXIT_INSN(),
   9360		},
   9361		INTERNAL,
   9362		{ },
   9363		{ { 0, -12345678 } }
   9364	},
   9365	/* BPF_JMP32 | BPF_JSLT | BPF_K */
   9366	{
   9367		"JMP32_JSLT_K: Small immediate",
   9368		.u.insns_int = {
   9369			BPF_ALU32_IMM(BPF_MOV, R0, -123),
   9370			BPF_JMP32_IMM(BPF_JSLT, R0, -123, 1),
   9371			BPF_JMP32_IMM(BPF_JSLT, R0, -122, 1),
   9372			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9373			BPF_EXIT_INSN(),
   9374		},
   9375		INTERNAL,
   9376		{ },
   9377		{ { 0, -123 } }
   9378	},
   9379	{
   9380		"JMP32_JSLT_K: Large immediate",
   9381		.u.insns_int = {
   9382			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
   9383			BPF_JMP32_IMM(BPF_JSLT, R0, -12345678, 1),
   9384			BPF_JMP32_IMM(BPF_JSLT, R0, -12345677, 1),
   9385			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9386			BPF_EXIT_INSN(),
   9387		},
   9388		INTERNAL,
   9389		{ },
   9390		{ { 0, -12345678 } }
   9391	},
   9392	/* BPF_JMP32 | BPF_JSLT | BPF_X */
   9393	{
   9394		"JMP32_JSLT_X",
   9395		.u.insns_int = {
   9396			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
   9397			BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
   9398			BPF_JMP32_REG(BPF_JSLT, R0, R1, 2),
   9399			BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
   9400			BPF_JMP32_REG(BPF_JSLT, R0, R1, 1),
   9401			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9402			BPF_EXIT_INSN(),
   9403		},
   9404		INTERNAL,
   9405		{ },
   9406		{ { 0, -12345678 } }
   9407	},
   9408	/* BPF_JMP32 | BPF_JSLE | BPF_K */
   9409	{
   9410		"JMP32_JSLE_K: Small immediate",
   9411		.u.insns_int = {
   9412			BPF_ALU32_IMM(BPF_MOV, R0, -123),
   9413			BPF_JMP32_IMM(BPF_JSLE, R0, -124, 1),
   9414			BPF_JMP32_IMM(BPF_JSLE, R0, -123, 1),
   9415			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9416			BPF_EXIT_INSN(),
   9417		},
   9418		INTERNAL,
   9419		{ },
   9420		{ { 0, -123 } }
   9421	},
   9422	{
   9423		"JMP32_JSLE_K: Large immediate",
   9424		.u.insns_int = {
   9425			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
   9426			BPF_JMP32_IMM(BPF_JSLE, R0, -12345679, 1),
   9427			BPF_JMP32_IMM(BPF_JSLE, R0, -12345678, 1),
   9428			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9429			BPF_EXIT_INSN(),
   9430		},
   9431		INTERNAL,
   9432		{ },
   9433		{ { 0, -12345678 } }
   9434	},
   9435	/* BPF_JMP32 | BPF_JSLE | BPF_X */
   9436	{
   9437		"JMP32_JSLE_X",
   9438		.u.insns_int = {
   9439			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
   9440			BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
   9441			BPF_JMP32_REG(BPF_JSLE, R0, R1, 2),
   9442			BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
   9443			BPF_JMP32_REG(BPF_JSLE, R0, R1, 1),
   9444			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9445			BPF_EXIT_INSN(),
   9446		},
   9447		INTERNAL,
   9448		{ },
   9449		{ { 0, -12345678 } }
   9450	},
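       	/*
       	 * The jump tests below use the 64-bit BPF_JMP class. The signed
       	 * comparisons load constants such as 0xffffffffffffffff (-1) via
       	 * ldimm64 so that the full 64-bit value takes part in the compare.
       	 */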
   9451	/* BPF_JMP | BPF_EXIT */
   9452	{
   9453		"JMP_EXIT",
   9454		.u.insns_int = {
   9455			BPF_ALU32_IMM(BPF_MOV, R0, 0x4711),
   9456			BPF_EXIT_INSN(),
   9457			BPF_ALU32_IMM(BPF_MOV, R0, 0x4712),
   9458		},
   9459		INTERNAL,
   9460		{ },
   9461		{ { 0, 0x4711 } },
   9462	},
   9463	/* BPF_JMP | BPF_JA */
   9464	{
   9465		"JMP_JA: Unconditional jump: if (true) return 1",
   9466		.u.insns_int = {
   9467			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9468			BPF_JMP_IMM(BPF_JA, 0, 0, 1),
   9469			BPF_EXIT_INSN(),
   9470			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9471			BPF_EXIT_INSN(),
   9472		},
   9473		INTERNAL,
   9474		{ },
   9475		{ { 0, 1 } },
   9476	},
   9477	/* BPF_JMP | BPF_JSLT | BPF_K */
   9478	{
   9479		"JMP_JSLT_K: Signed jump: if (-2 < -1) return 1",
   9480		.u.insns_int = {
   9481			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9482			BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
   9483			BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
   9484			BPF_EXIT_INSN(),
   9485			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9486			BPF_EXIT_INSN(),
   9487		},
   9488		INTERNAL,
   9489		{ },
   9490		{ { 0, 1 } },
   9491	},
   9492	{
   9493		"JMP_JSLT_K: Signed jump: if (-1 < -1) return 0",
   9494		.u.insns_int = {
   9495			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9496			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
   9497			BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
   9498			BPF_EXIT_INSN(),
   9499			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9500			BPF_EXIT_INSN(),
   9501		},
   9502		INTERNAL,
   9503		{ },
   9504		{ { 0, 1 } },
   9505	},
   9506	/* BPF_JMP | BPF_JSGT | BPF_K */
   9507	{
   9508		"JMP_JSGT_K: Signed jump: if (-1 > -2) return 1",
   9509		.u.insns_int = {
   9510			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9511			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
   9512			BPF_JMP_IMM(BPF_JSGT, R1, -2, 1),
   9513			BPF_EXIT_INSN(),
   9514			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9515			BPF_EXIT_INSN(),
   9516		},
   9517		INTERNAL,
   9518		{ },
   9519		{ { 0, 1 } },
   9520	},
   9521	{
   9522		"JMP_JSGT_K: Signed jump: if (-1 > -1) return 0",
   9523		.u.insns_int = {
   9524			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9525			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
   9526			BPF_JMP_IMM(BPF_JSGT, R1, -1, 1),
   9527			BPF_EXIT_INSN(),
   9528			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9529			BPF_EXIT_INSN(),
   9530		},
   9531		INTERNAL,
   9532		{ },
   9533		{ { 0, 1 } },
   9534	},
   9535	/* BPF_JMP | BPF_JSLE | BPF_K */
   9536	{
   9537		"JMP_JSLE_K: Signed jump: if (-2 <= -1) return 1",
   9538		.u.insns_int = {
   9539			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9540			BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
   9541			BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
   9542			BPF_EXIT_INSN(),
   9543			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9544			BPF_EXIT_INSN(),
   9545		},
   9546		INTERNAL,
   9547		{ },
   9548		{ { 0, 1 } },
   9549	},
   9550	{
   9551		"JMP_JSLE_K: Signed jump: if (-1 <= -1) return 1",
   9552		.u.insns_int = {
   9553			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9554			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
   9555			BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
   9556			BPF_EXIT_INSN(),
   9557			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9558			BPF_EXIT_INSN(),
   9559		},
   9560		INTERNAL,
   9561		{ },
   9562		{ { 0, 1 } },
   9563	},
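       	/*
       	 * The "value walk" tests step R1 towards zero and re-evaluate the
       	 * same condition after each step, so the branch flips from
       	 * not-taken to taken within a single program.
       	 */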
   9564	{
   9565		"JMP_JSLE_K: Signed jump: value walk 1",
   9566		.u.insns_int = {
   9567			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9568			BPF_LD_IMM64(R1, 3),
   9569			BPF_JMP_IMM(BPF_JSLE, R1, 0, 6),
   9570			BPF_ALU64_IMM(BPF_SUB, R1, 1),
   9571			BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
   9572			BPF_ALU64_IMM(BPF_SUB, R1, 1),
   9573			BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
   9574			BPF_ALU64_IMM(BPF_SUB, R1, 1),
   9575			BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
   9576			BPF_EXIT_INSN(),		/* bad exit */
   9577			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
   9578			BPF_EXIT_INSN(),
   9579		},
   9580		INTERNAL,
   9581		{ },
   9582		{ { 0, 1 } },
   9583	},
   9584	{
   9585		"JMP_JSLE_K: Signed jump: value walk 2",
   9586		.u.insns_int = {
   9587			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9588			BPF_LD_IMM64(R1, 3),
   9589			BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
   9590			BPF_ALU64_IMM(BPF_SUB, R1, 2),
   9591			BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
   9592			BPF_ALU64_IMM(BPF_SUB, R1, 2),
   9593			BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
   9594			BPF_EXIT_INSN(),		/* bad exit */
   9595			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
   9596			BPF_EXIT_INSN(),
   9597		},
   9598		INTERNAL,
   9599		{ },
   9600		{ { 0, 1 } },
   9601	},
   9602	/* BPF_JMP | BPF_JSGE | BPF_K */
   9603	{
   9604		"JMP_JSGE_K: Signed jump: if (-1 >= -2) return 1",
   9605		.u.insns_int = {
   9606			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9607			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
   9608			BPF_JMP_IMM(BPF_JSGE, R1, -2, 1),
   9609			BPF_EXIT_INSN(),
   9610			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9611			BPF_EXIT_INSN(),
   9612		},
   9613		INTERNAL,
   9614		{ },
   9615		{ { 0, 1 } },
   9616	},
   9617	{
   9618		"JMP_JSGE_K: Signed jump: if (-1 >= -1) return 1",
   9619		.u.insns_int = {
   9620			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9621			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
   9622			BPF_JMP_IMM(BPF_JSGE, R1, -1, 1),
   9623			BPF_EXIT_INSN(),
   9624			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9625			BPF_EXIT_INSN(),
   9626		},
   9627		INTERNAL,
   9628		{ },
   9629		{ { 0, 1 } },
   9630	},
   9631	{
   9632		"JMP_JSGE_K: Signed jump: value walk 1",
   9633		.u.insns_int = {
   9634			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9635			BPF_LD_IMM64(R1, -3),
   9636			BPF_JMP_IMM(BPF_JSGE, R1, 0, 6),
   9637			BPF_ALU64_IMM(BPF_ADD, R1, 1),
   9638			BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
   9639			BPF_ALU64_IMM(BPF_ADD, R1, 1),
   9640			BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
   9641			BPF_ALU64_IMM(BPF_ADD, R1, 1),
   9642			BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
   9643			BPF_EXIT_INSN(),		/* bad exit */
   9644			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
   9645			BPF_EXIT_INSN(),
   9646		},
   9647		INTERNAL,
   9648		{ },
   9649		{ { 0, 1 } },
   9650	},
   9651	{
   9652		"JMP_JSGE_K: Signed jump: value walk 2",
   9653		.u.insns_int = {
   9654			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9655			BPF_LD_IMM64(R1, -3),
   9656			BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
   9657			BPF_ALU64_IMM(BPF_ADD, R1, 2),
   9658			BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
   9659			BPF_ALU64_IMM(BPF_ADD, R1, 2),
   9660			BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
   9661			BPF_EXIT_INSN(),		/* bad exit */
   9662			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
   9663			BPF_EXIT_INSN(),
   9664		},
   9665		INTERNAL,
   9666		{ },
   9667		{ { 0, 1 } },
   9668	},
   9669	/* BPF_JMP | BPF_JGT | BPF_K */
   9670	{
   9671		"JMP_JGT_K: if (3 > 2) return 1",
   9672		.u.insns_int = {
   9673			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9674			BPF_LD_IMM64(R1, 3),
   9675			BPF_JMP_IMM(BPF_JGT, R1, 2, 1),
   9676			BPF_EXIT_INSN(),
   9677			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9678			BPF_EXIT_INSN(),
   9679		},
   9680		INTERNAL,
   9681		{ },
   9682		{ { 0, 1 } },
   9683	},
   9684	{
   9685		"JMP_JGT_K: Unsigned jump: if (-1 > 1) return 1",
   9686		.u.insns_int = {
   9687			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9688			BPF_LD_IMM64(R1, -1),
   9689			BPF_JMP_IMM(BPF_JGT, R1, 1, 1),
   9690			BPF_EXIT_INSN(),
   9691			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9692			BPF_EXIT_INSN(),
   9693		},
   9694		INTERNAL,
   9695		{ },
   9696		{ { 0, 1 } },
   9697	},
   9698	/* BPF_JMP | BPF_JLT | BPF_K */
   9699	{
   9700		"JMP_JLT_K: if (2 < 3) return 1",
   9701		.u.insns_int = {
   9702			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9703			BPF_LD_IMM64(R1, 2),
   9704			BPF_JMP_IMM(BPF_JLT, R1, 3, 1),
   9705			BPF_EXIT_INSN(),
   9706			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9707			BPF_EXIT_INSN(),
   9708		},
   9709		INTERNAL,
   9710		{ },
   9711		{ { 0, 1 } },
   9712	},
   9713	{
   9714		"JMP_JLT_K: Unsigned jump: if (1 < -1) return 1",
   9715		.u.insns_int = {
   9716			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9717			BPF_LD_IMM64(R1, 1),
   9718			BPF_JMP_IMM(BPF_JLT, R1, -1, 1),
   9719			BPF_EXIT_INSN(),
   9720			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9721			BPF_EXIT_INSN(),
   9722		},
   9723		INTERNAL,
   9724		{ },
   9725		{ { 0, 1 } },
   9726	},
   9727	/* BPF_JMP | BPF_JGE | BPF_K */
   9728	{
   9729		"JMP_JGE_K: if (3 >= 2) return 1",
   9730		.u.insns_int = {
   9731			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9732			BPF_LD_IMM64(R1, 3),
   9733			BPF_JMP_IMM(BPF_JGE, R1, 2, 1),
   9734			BPF_EXIT_INSN(),
   9735			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9736			BPF_EXIT_INSN(),
   9737		},
   9738		INTERNAL,
   9739		{ },
   9740		{ { 0, 1 } },
   9741	},
   9742	/* BPF_JMP | BPF_JLE | BPF_K */
   9743	{
   9744		"JMP_JLE_K: if (2 <= 3) return 1",
   9745		.u.insns_int = {
   9746			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9747			BPF_LD_IMM64(R1, 2),
   9748			BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
   9749			BPF_EXIT_INSN(),
   9750			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9751			BPF_EXIT_INSN(),
   9752		},
   9753		INTERNAL,
   9754		{ },
   9755		{ { 0, 1 } },
   9756	},
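       	/*
       	 * The two "jump backwards" tests place the success exit before the
       	 * comparison, so the conditional branch needs a negative offset:
       	 * -6, counted from the instruction after the branch, lands on the
       	 * "out:" MOV (the ldimm64 in between occupies two slots).
       	 */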
   9757	/* BPF_JMP | BPF_JGT | BPF_K jump backwards */
   9758	{
   9759		"JMP_JGT_K: if (3 > 2) return 1 (jump backwards)",
   9760		.u.insns_int = {
   9761			BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
   9762			BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
   9763			BPF_EXIT_INSN(),
   9764			BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
   9765			BPF_LD_IMM64(R1, 3), /* note: this takes 2 insns */
   9766			BPF_JMP_IMM(BPF_JGT, R1, 2, -6), /* goto out */
   9767			BPF_EXIT_INSN(),
   9768		},
   9769		INTERNAL,
   9770		{ },
   9771		{ { 0, 1 } },
   9772	},
   9773	{
   9774		"JMP_JGE_K: if (3 >= 3) return 1",
   9775		.u.insns_int = {
   9776			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9777			BPF_LD_IMM64(R1, 3),
   9778			BPF_JMP_IMM(BPF_JGE, R1, 3, 1),
   9779			BPF_EXIT_INSN(),
   9780			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9781			BPF_EXIT_INSN(),
   9782		},
   9783		INTERNAL,
   9784		{ },
   9785		{ { 0, 1 } },
   9786	},
   9787	/* BPF_JMP | BPF_JLT | BPF_K jump backwards */
   9788	{
   9789		"JMP_JLT_K: if (2 < 3) return 1 (jump backwards)",
   9790		.u.insns_int = {
   9791			BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
   9792			BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
   9793			BPF_EXIT_INSN(),
   9794			BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
   9795			BPF_LD_IMM64(R1, 2), /* note: this takes 2 insns */
   9796			BPF_JMP_IMM(BPF_JLT, R1, 3, -6), /* goto out */
   9797			BPF_EXIT_INSN(),
   9798		},
   9799		INTERNAL,
   9800		{ },
   9801		{ { 0, 1 } },
   9802	},
   9803	{
   9804		"JMP_JLE_K: if (3 <= 3) return 1",
   9805		.u.insns_int = {
   9806			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9807			BPF_LD_IMM64(R1, 3),
   9808			BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
   9809			BPF_EXIT_INSN(),
   9810			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9811			BPF_EXIT_INSN(),
   9812		},
   9813		INTERNAL,
   9814		{ },
   9815		{ { 0, 1 } },
   9816	},
   9817	/* BPF_JMP | BPF_JNE | BPF_K */
   9818	{
   9819		"JMP_JNE_K: if (3 != 2) return 1",
   9820		.u.insns_int = {
   9821			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9822			BPF_LD_IMM64(R1, 3),
   9823			BPF_JMP_IMM(BPF_JNE, R1, 2, 1),
   9824			BPF_EXIT_INSN(),
   9825			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9826			BPF_EXIT_INSN(),
   9827		},
   9828		INTERNAL,
   9829		{ },
   9830		{ { 0, 1 } },
   9831	},
   9832	/* BPF_JMP | BPF_JEQ | BPF_K */
   9833	{
   9834		"JMP_JEQ_K: if (3 == 3) return 1",
   9835		.u.insns_int = {
   9836			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9837			BPF_LD_IMM64(R1, 3),
   9838			BPF_JMP_IMM(BPF_JEQ, R1, 3, 1),
   9839			BPF_EXIT_INSN(),
   9840			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9841			BPF_EXIT_INSN(),
   9842		},
   9843		INTERNAL,
   9844		{ },
   9845		{ { 0, 1 } },
   9846	},
   9847	/* BPF_JMP | BPF_JSET | BPF_K */
   9848	{
   9849		"JMP_JSET_K: if (0x3 & 0x2) return 1",
   9850		.u.insns_int = {
   9851			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9852			BPF_LD_IMM64(R1, 3),
   9853			BPF_JMP_IMM(BPF_JSET, R1, 2, 1),
   9854			BPF_EXIT_INSN(),
   9855			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9856			BPF_EXIT_INSN(),
   9857		},
   9858		INTERNAL,
   9859		{ },
   9860		{ { 0, 1 } },
   9861	},
   9862	{
   9863		"JMP_JSET_K: if (0x3 & 0xffffffff) return 1",
   9864		.u.insns_int = {
   9865			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9866			BPF_LD_IMM64(R1, 3),
   9867			BPF_JMP_IMM(BPF_JSET, R1, 0xffffffff, 1),
   9868			BPF_EXIT_INSN(),
   9869			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9870			BPF_EXIT_INSN(),
   9871		},
   9872		INTERNAL,
   9873		{ },
   9874		{ { 0, 1 } },
   9875	},
   9876	/* BPF_JMP | BPF_JSGT | BPF_X */
   9877	{
   9878		"JMP_JSGT_X: Signed jump: if (-1 > -2) return 1",
   9879		.u.insns_int = {
   9880			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9881			BPF_LD_IMM64(R1, -1),
   9882			BPF_LD_IMM64(R2, -2),
   9883			BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
   9884			BPF_EXIT_INSN(),
   9885			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9886			BPF_EXIT_INSN(),
   9887		},
   9888		INTERNAL,
   9889		{ },
   9890		{ { 0, 1 } },
   9891	},
   9892	{
   9893		"JMP_JSGT_X: Signed jump: if (-1 > -1) return 0",
   9894		.u.insns_int = {
   9895			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9896			BPF_LD_IMM64(R1, -1),
   9897			BPF_LD_IMM64(R2, -1),
   9898			BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
   9899			BPF_EXIT_INSN(),
   9900			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9901			BPF_EXIT_INSN(),
   9902		},
   9903		INTERNAL,
   9904		{ },
   9905		{ { 0, 1 } },
   9906	},
   9907	/* BPF_JMP | BPF_JSLT | BPF_X */
   9908	{
   9909		"JMP_JSLT_X: Signed jump: if (-2 < -1) return 1",
   9910		.u.insns_int = {
   9911			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9912			BPF_LD_IMM64(R1, -1),
   9913			BPF_LD_IMM64(R2, -2),
   9914			BPF_JMP_REG(BPF_JSLT, R2, R1, 1),
   9915			BPF_EXIT_INSN(),
   9916			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9917			BPF_EXIT_INSN(),
   9918		},
   9919		INTERNAL,
   9920		{ },
   9921		{ { 0, 1 } },
   9922	},
   9923	{
   9924		"JMP_JSLT_X: Signed jump: if (-1 < -1) return 0",
   9925		.u.insns_int = {
   9926			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9927			BPF_LD_IMM64(R1, -1),
   9928			BPF_LD_IMM64(R2, -1),
   9929			BPF_JMP_REG(BPF_JSLT, R1, R2, 1),
   9930			BPF_EXIT_INSN(),
   9931			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9932			BPF_EXIT_INSN(),
   9933		},
   9934		INTERNAL,
   9935		{ },
   9936		{ { 0, 1 } },
   9937	},
   9938	/* BPF_JMP | BPF_JSGE | BPF_X */
   9939	{
   9940		"JMP_JSGE_X: Signed jump: if (-1 >= -2) return 1",
   9941		.u.insns_int = {
   9942			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9943			BPF_LD_IMM64(R1, -1),
   9944			BPF_LD_IMM64(R2, -2),
   9945			BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
   9946			BPF_EXIT_INSN(),
   9947			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9948			BPF_EXIT_INSN(),
   9949		},
   9950		INTERNAL,
   9951		{ },
   9952		{ { 0, 1 } },
   9953	},
   9954	{
   9955		"JMP_JSGE_X: Signed jump: if (-1 >= -1) return 1",
   9956		.u.insns_int = {
   9957			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9958			BPF_LD_IMM64(R1, -1),
   9959			BPF_LD_IMM64(R2, -1),
   9960			BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
   9961			BPF_EXIT_INSN(),
   9962			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9963			BPF_EXIT_INSN(),
   9964		},
   9965		INTERNAL,
   9966		{ },
   9967		{ { 0, 1 } },
   9968	},
   9969	/* BPF_JMP | BPF_JSLE | BPF_X */
   9970	{
   9971		"JMP_JSLE_X: Signed jump: if (-2 <= -1) return 1",
   9972		.u.insns_int = {
   9973			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9974			BPF_LD_IMM64(R1, -1),
   9975			BPF_LD_IMM64(R2, -2),
   9976			BPF_JMP_REG(BPF_JSLE, R2, R1, 1),
   9977			BPF_EXIT_INSN(),
   9978			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9979			BPF_EXIT_INSN(),
   9980		},
   9981		INTERNAL,
   9982		{ },
   9983		{ { 0, 1 } },
   9984	},
   9985	{
   9986		"JMP_JSLE_X: Signed jump: if (-1 <= -1) return 1",
   9987		.u.insns_int = {
   9988			BPF_ALU32_IMM(BPF_MOV, R0, 0),
   9989			BPF_LD_IMM64(R1, -1),
   9990			BPF_LD_IMM64(R2, -1),
   9991			BPF_JMP_REG(BPF_JSLE, R1, R2, 1),
   9992			BPF_EXIT_INSN(),
   9993			BPF_ALU32_IMM(BPF_MOV, R0, 1),
   9994			BPF_EXIT_INSN(),
   9995		},
   9996		INTERNAL,
   9997		{ },
   9998		{ { 0, 1 } },
   9999	},
  10000	/* BPF_JMP | BPF_JGT | BPF_X */
  10001	{
  10002		"JMP_JGT_X: if (3 > 2) return 1",
  10003		.u.insns_int = {
  10004			BPF_ALU32_IMM(BPF_MOV, R0, 0),
  10005			BPF_LD_IMM64(R1, 3),
  10006			BPF_LD_IMM64(R2, 2),
  10007			BPF_JMP_REG(BPF_JGT, R1, R2, 1),
  10008			BPF_EXIT_INSN(),
  10009			BPF_ALU32_IMM(BPF_MOV, R0, 1),
  10010			BPF_EXIT_INSN(),
  10011		},
  10012		INTERNAL,
  10013		{ },
  10014		{ { 0, 1 } },
  10015	},
  10016	{
  10017		"JMP_JGT_X: Unsigned jump: if (-1 > 1) return 1",
  10018		.u.insns_int = {
  10019			BPF_ALU32_IMM(BPF_MOV, R0, 0),
  10020			BPF_LD_IMM64(R1, -1),
  10021			BPF_LD_IMM64(R2, 1),
  10022			BPF_JMP_REG(BPF_JGT, R1, R2, 1),
  10023			BPF_EXIT_INSN(),
  10024			BPF_ALU32_IMM(BPF_MOV, R0, 1),
  10025			BPF_EXIT_INSN(),
  10026		},
  10027		INTERNAL,
  10028		{ },
  10029		{ { 0, 1 } },
  10030	},
  10031	/* BPF_JMP | BPF_JLT | BPF_X */
  10032	{
  10033		"JMP_JLT_X: if (2 < 3) return 1",
  10034		.u.insns_int = {
  10035			BPF_ALU32_IMM(BPF_MOV, R0, 0),
  10036			BPF_LD_IMM64(R1, 3),
  10037			BPF_LD_IMM64(R2, 2),
  10038			BPF_JMP_REG(BPF_JLT, R2, R1, 1),
  10039			BPF_EXIT_INSN(),
  10040			BPF_ALU32_IMM(BPF_MOV, R0, 1),
  10041			BPF_EXIT_INSN(),
  10042		},
  10043		INTERNAL,
  10044		{ },
  10045		{ { 0, 1 } },
  10046	},
  10047	{
  10048		"JMP_JLT_X: Unsigned jump: if (1 < -1) return 1",
  10049		.u.insns_int = {
  10050			BPF_ALU32_IMM(BPF_MOV, R0, 0),
  10051			BPF_LD_IMM64(R1, -1),
  10052			BPF_LD_IMM64(R2, 1),
  10053			BPF_JMP_REG(BPF_JLT, R2, R1, 1),
  10054			BPF_EXIT_INSN(),
  10055			BPF_ALU32_IMM(BPF_MOV, R0, 1),
  10056			BPF_EXIT_INSN(),
  10057		},
  10058		INTERNAL,
  10059		{ },
  10060		{ { 0, 1 } },
  10061	},
  10062	/* BPF_JMP | BPF_JGE | BPF_X */
  10063	{
  10064		"JMP_JGE_X: if (3 >= 2) return 1",
  10065		.u.insns_int = {
  10066			BPF_ALU32_IMM(BPF_MOV, R0, 0),
  10067			BPF_LD_IMM64(R1, 3),
  10068			BPF_LD_IMM64(R2, 2),
  10069			BPF_JMP_REG(BPF_JGE, R1, R2, 1),
  10070			BPF_EXIT_INSN(),
  10071			BPF_ALU32_IMM(BPF_MOV, R0, 1),
  10072			BPF_EXIT_INSN(),
  10073		},
  10074		INTERNAL,
  10075		{ },
  10076		{ { 0, 1 } },
  10077	},
  10078	{
  10079		"JMP_JGE_X: if (3 >= 3) return 1",
  10080		.u.insns_int = {
  10081			BPF_ALU32_IMM(BPF_MOV, R0, 0),
  10082			BPF_LD_IMM64(R1, 3),
  10083			BPF_LD_IMM64(R2, 3),
  10084			BPF_JMP_REG(BPF_JGE, R1, R2, 1),
  10085			BPF_EXIT_INSN(),
  10086			BPF_ALU32_IMM(BPF_MOV, R0, 1),
  10087			BPF_EXIT_INSN(),
  10088		},
  10089		INTERNAL,
  10090		{ },
  10091		{ { 0, 1 } },
  10092	},
  10093	/* BPF_JMP | BPF_JLE | BPF_X */
  10094	{
  10095		"JMP_JLE_X: if (2 <= 3) return 1",
  10096		.u.insns_int = {
  10097			BPF_ALU32_IMM(BPF_MOV, R0, 0),
  10098			BPF_LD_IMM64(R1, 3),
  10099			BPF_LD_IMM64(R2, 2),
  10100			BPF_JMP_REG(BPF_JLE, R2, R1, 1),
  10101			BPF_EXIT_INSN(),
  10102			BPF_ALU32_IMM(BPF_MOV, R0, 1),
  10103			BPF_EXIT_INSN(),
  10104		},
  10105		INTERNAL,
  10106		{ },
  10107		{ { 0, 1 } },
  10108	},
  10109	{
  10110		"JMP_JLE_X: if (3 <= 3) return 1",
  10111		.u.insns_int = {
  10112			BPF_ALU32_IMM(BPF_MOV, R0, 0),
  10113			BPF_LD_IMM64(R1, 3),
  10114			BPF_LD_IMM64(R2, 3),
  10115			BPF_JMP_REG(BPF_JLE, R1, R2, 1),
  10116			BPF_EXIT_INSN(),
  10117			BPF_ALU32_IMM(BPF_MOV, R0, 1),
  10118			BPF_EXIT_INSN(),
  10119		},
  10120		INTERNAL,
  10121		{ },
  10122		{ { 0, 1 } },
  10123	},
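       	/*
       	 * The ldimm64 tests rely on BPF_LD_IMM64 taking two instruction
       	 * slots: a branch offset of 2 skips exactly one ldimm64 and an
       	 * offset of 4 skips two. Since only the low 32 bits of R0 form the
       	 * return value, the expected results are 0xeeeeeeee / 0xffffffff.
       	 */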
  10124	{
  10125		/* Mainly testing JIT + imm64 here. */
  10126		"JMP_JGE_X: ldimm64 test 1",
  10127		.u.insns_int = {
  10128			BPF_ALU32_IMM(BPF_MOV, R0, 0),
  10129			BPF_LD_IMM64(R1, 3),
  10130			BPF_LD_IMM64(R2, 2),
  10131			BPF_JMP_REG(BPF_JGE, R1, R2, 2),
  10132			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
  10133			BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
  10134			BPF_EXIT_INSN(),
  10135		},
  10136		INTERNAL,
  10137		{ },
  10138		{ { 0, 0xeeeeeeeeU } },
  10139	},
  10140	{
  10141		"JMP_JGE_X: ldimm64 test 2",
  10142		.u.insns_int = {
  10143			BPF_ALU32_IMM(BPF_MOV, R0, 0),
  10144			BPF_LD_IMM64(R1, 3),
  10145			BPF_LD_IMM64(R2, 2),
  10146			BPF_JMP_REG(BPF_JGE, R1, R2, 0),
  10147			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
  10148			BPF_EXIT_INSN(),
  10149		},
  10150		INTERNAL,
  10151		{ },
  10152		{ { 0, 0xffffffffU } },
  10153	},
  10154	{
  10155		"JMP_JGE_X: ldimm64 test 3",
  10156		.u.insns_int = {
  10157			BPF_ALU32_IMM(BPF_MOV, R0, 1),
  10158			BPF_LD_IMM64(R1, 3),
  10159			BPF_LD_IMM64(R2, 2),
  10160			BPF_JMP_REG(BPF_JGE, R1, R2, 4),
  10161			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
  10162			BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
  10163			BPF_EXIT_INSN(),
  10164		},
  10165		INTERNAL,
  10166		{ },
  10167		{ { 0, 1 } },
  10168	},
  10169	{
  10170		"JMP_JLE_X: ldimm64 test 1",
  10171		.u.insns_int = {
  10172			BPF_ALU32_IMM(BPF_MOV, R0, 0),
  10173			BPF_LD_IMM64(R1, 3),
  10174			BPF_LD_IMM64(R2, 2),
  10175			BPF_JMP_REG(BPF_JLE, R2, R1, 2),
  10176			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
  10177			BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
  10178			BPF_EXIT_INSN(),
  10179		},
  10180		INTERNAL,
  10181		{ },
  10182		{ { 0, 0xeeeeeeeeU } },
  10183	},
  10184	{
  10185		"JMP_JLE_X: ldimm64 test 2",
  10186		.u.insns_int = {
  10187			BPF_ALU32_IMM(BPF_MOV, R0, 0),
  10188			BPF_LD_IMM64(R1, 3),
  10189			BPF_LD_IMM64(R2, 2),
  10190			BPF_JMP_REG(BPF_JLE, R2, R1, 0),
  10191			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
  10192			BPF_EXIT_INSN(),
  10193		},
  10194		INTERNAL,
  10195		{ },
  10196		{ { 0, 0xffffffffU } },
  10197	},
  10198	{
  10199		"JMP_JLE_X: ldimm64 test 3",
  10200		.u.insns_int = {
  10201			BPF_ALU32_IMM(BPF_MOV, R0, 1),
  10202			BPF_LD_IMM64(R1, 3),
  10203			BPF_LD_IMM64(R2, 2),
  10204			BPF_JMP_REG(BPF_JLE, R2, R1, 4),
  10205			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
  10206			BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
  10207			BPF_EXIT_INSN(),
  10208		},
  10209		INTERNAL,
  10210		{ },
  10211		{ { 0, 1 } },
  10212	},
  10213	/* BPF_JMP | BPF_JNE | BPF_X */
  10214	{
  10215		"JMP_JNE_X: if (3 != 2) return 1",
  10216		.u.insns_int = {
  10217			BPF_ALU32_IMM(BPF_MOV, R0, 0),
  10218			BPF_LD_IMM64(R1, 3),
  10219			BPF_LD_IMM64(R2, 2),
  10220			BPF_JMP_REG(BPF_JNE, R1, R2, 1),
  10221			BPF_EXIT_INSN(),
  10222			BPF_ALU32_IMM(BPF_MOV, R0, 1),
  10223			BPF_EXIT_INSN(),
  10224		},
  10225		INTERNAL,
  10226		{ },
  10227		{ { 0, 1 } },
  10228	},
  10229	/* BPF_JMP | BPF_JEQ | BPF_X */
  10230	{
  10231		"JMP_JEQ_X: if (3 == 3) return 1",
  10232		.u.insns_int = {
  10233			BPF_ALU32_IMM(BPF_MOV, R0, 0),
  10234			BPF_LD_IMM64(R1, 3),
  10235			BPF_LD_IMM64(R2, 3),
  10236			BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
  10237			BPF_EXIT_INSN(),
  10238			BPF_ALU32_IMM(BPF_MOV, R0, 1),
  10239			BPF_EXIT_INSN(),
  10240		},
  10241		INTERNAL,
  10242		{ },
  10243		{ { 0, 1 } },
  10244	},
  10245	/* BPF_JMP | BPF_JSET | BPF_X */
  10246	{
  10247		"JMP_JSET_X: if (0x3 & 0x2) return 1",
  10248		.u.insns_int = {
  10249			BPF_ALU32_IMM(BPF_MOV, R0, 0),
  10250			BPF_LD_IMM64(R1, 3),
  10251			BPF_LD_IMM64(R2, 2),
  10252			BPF_JMP_REG(BPF_JSET, R1, R2, 1),
  10253			BPF_EXIT_INSN(),
  10254			BPF_ALU32_IMM(BPF_MOV, R0, 1),
  10255			BPF_EXIT_INSN(),
  10256		},
  10257		INTERNAL,
  10258		{ },
  10259		{ { 0, 1 } },
  10260	},
  10261	{
  10262		"JMP_JSET_X: if (0x3 & 0xffffffff) return 1",
  10263		.u.insns_int = {
  10264			BPF_ALU32_IMM(BPF_MOV, R0, 0),
  10265			BPF_LD_IMM64(R1, 3),
  10266			BPF_LD_IMM64(R2, 0xffffffff),
  10267			BPF_JMP_REG(BPF_JSET, R1, R2, 1),
  10268			BPF_EXIT_INSN(),
  10269			BPF_ALU32_IMM(BPF_MOV, R0, 1),
  10270			BPF_EXIT_INSN(),
  10271		},
  10272		INTERNAL,
  10273		{ },
  10274		{ { 0, 1 } },
  10275	},
  10276	{
  10277		"JMP_JA: Jump, gap, jump, ...",
  10278		{ },
  10279		CLASSIC | FLAG_NO_DATA,
  10280		{ },
  10281		{ { 0, 0xababcbac } },
  10282		.fill_helper = bpf_fill_ja,
  10283	},
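       	/*
       	 * The BPF_MAXINSNS entries carry no static instruction array; the
       	 * .fill_helper callback generates a maximum-size program when the
       	 * test runs, mainly to stress the JIT with large images, long jump
       	 * distances and heavy use of helpers.
       	 */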
  10284	{	/* Mainly checking JIT here. */
  10285		"BPF_MAXINSNS: Maximum possible literals",
  10286		{ },
  10287		CLASSIC | FLAG_NO_DATA,
  10288		{ },
  10289		{ { 0, 0xffffffff } },
  10290		.fill_helper = bpf_fill_maxinsns1,
  10291	},
  10292	{	/* Mainly checking JIT here. */
  10293		"BPF_MAXINSNS: Single literal",
  10294		{ },
  10295		CLASSIC | FLAG_NO_DATA,
  10296		{ },
  10297		{ { 0, 0xfefefefe } },
  10298		.fill_helper = bpf_fill_maxinsns2,
  10299	},
  10300	{	/* Mainly checking JIT here. */
  10301		"BPF_MAXINSNS: Run/add until end",
  10302		{ },
  10303		CLASSIC | FLAG_NO_DATA,
  10304		{ },
  10305		{ { 0, 0x947bf368 } },
  10306		.fill_helper = bpf_fill_maxinsns3,
  10307	},
  10308	{
  10309		"BPF_MAXINSNS: Too many instructions",
  10310		{ },
  10311		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
  10312		{ },
  10313		{ },
  10314		.fill_helper = bpf_fill_maxinsns4,
  10315		.expected_errcode = -EINVAL,
  10316	},
  10317	{	/* Mainly checking JIT here. */
  10318		"BPF_MAXINSNS: Very long jump",
  10319		{ },
  10320		CLASSIC | FLAG_NO_DATA,
  10321		{ },
  10322		{ { 0, 0xabababab } },
  10323		.fill_helper = bpf_fill_maxinsns5,
  10324	},
  10325	{	/* Mainly checking JIT here. */
  10326		"BPF_MAXINSNS: Ctx heavy transformations",
  10327		{ },
  10328		CLASSIC,
  10329		{ },
  10330		{
  10331			{  1, SKB_VLAN_PRESENT },
  10332			{ 10, SKB_VLAN_PRESENT }
  10333		},
  10334		.fill_helper = bpf_fill_maxinsns6,
  10335	},
  10336	{	/* Mainly checking JIT here. */
  10337		"BPF_MAXINSNS: Call heavy transformations",
  10338		{ },
  10339		CLASSIC | FLAG_NO_DATA,
  10340		{ },
  10341		{ { 1, 0 }, { 10, 0 } },
  10342		.fill_helper = bpf_fill_maxinsns7,
  10343	},
  10344	{	/* Mainly checking JIT here. */
  10345		"BPF_MAXINSNS: Jump heavy test",
  10346		{ },
  10347		CLASSIC | FLAG_NO_DATA,
  10348		{ },
  10349		{ { 0, 0xffffffff } },
  10350		.fill_helper = bpf_fill_maxinsns8,
  10351	},
  10352	{	/* Mainly checking JIT here. */
  10353		"BPF_MAXINSNS: Very long jump backwards",
  10354		{ },
  10355		INTERNAL | FLAG_NO_DATA,
  10356		{ },
  10357		{ { 0, 0xcbababab } },
  10358		.fill_helper = bpf_fill_maxinsns9,
  10359	},
  10360	{	/* Mainly checking JIT here. */
  10361		"BPF_MAXINSNS: Edge hopping nuthouse",
  10362		{ },
  10363		INTERNAL | FLAG_NO_DATA,
  10364		{ },
  10365		{ { 0, 0xabababac } },
  10366		.fill_helper = bpf_fill_maxinsns10,
  10367	},
  10368	{
  10369		"BPF_MAXINSNS: Jump, gap, jump, ...",
  10370		{ },
  10371		CLASSIC | FLAG_NO_DATA,
  10372		{ },
  10373		{ { 0, 0xababcbac } },
  10374		.fill_helper = bpf_fill_maxinsns11,
  10375	},
  10376	{
  10377		"BPF_MAXINSNS: jump over MSH",
  10378		{ },
  10379		CLASSIC | FLAG_EXPECTED_FAIL,
  10380		{ 0xfa, 0xfb, 0xfc, 0xfd, },
  10381		{ { 4, 0xabababab } },
  10382		.fill_helper = bpf_fill_maxinsns12,
  10383		.expected_errcode = -EINVAL,
  10384	},
  10385	{
  10386		"BPF_MAXINSNS: exec all MSH",
  10387		{ },
  10388		CLASSIC,
  10389		{ 0xfa, 0xfb, 0xfc, 0xfd, },
  10390		{ { 4, 0xababab83 } },
  10391		.fill_helper = bpf_fill_maxinsns13,
  10392	},
  10393	{
  10394		"BPF_MAXINSNS: ld_abs+get_processor_id",
  10395		{ },
  10396		CLASSIC,
  10397		{ },
  10398		{ { 1, 0xbee } },
  10399		.fill_helper = bpf_fill_ld_abs_get_processor_id,
  10400	},
  10401	/*
  10402	 * LD_IND / LD_ABS on fragmented SKBs
  10403	 */
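       	/*
       	 * For the FLAG_SKB_FRAG tests the .frag_data bytes are expected to
       	 * live in a paged fragment of the test skb rather than in its
       	 * linear head, so these LD_ABS/LD_IND loads have to go through the
       	 * slow path that handles non-linear skb data.
       	 */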
  10404	{
  10405		"LD_IND byte frag",
  10406		.u.insns = {
  10407			BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
  10408			BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x0),
  10409			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10410		},
  10411		CLASSIC | FLAG_SKB_FRAG,
  10412		{ },
  10413		{ {0x40, 0x42} },
  10414		.frag_data = {
  10415			0x42, 0x00, 0x00, 0x00,
  10416			0x43, 0x44, 0x00, 0x00,
  10417			0x21, 0x07, 0x19, 0x83,
  10418		},
  10419	},
  10420	{
  10421		"LD_IND halfword frag",
  10422		.u.insns = {
  10423			BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
  10424			BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x4),
  10425			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10426		},
  10427		CLASSIC | FLAG_SKB_FRAG,
  10428		{ },
  10429		{ {0x40, 0x4344} },
  10430		.frag_data = {
  10431			0x42, 0x00, 0x00, 0x00,
  10432			0x43, 0x44, 0x00, 0x00,
  10433			0x21, 0x07, 0x19, 0x83,
  10434		},
  10435	},
  10436	{
  10437		"LD_IND word frag",
  10438		.u.insns = {
  10439			BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
  10440			BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x8),
  10441			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10442		},
  10443		CLASSIC | FLAG_SKB_FRAG,
  10444		{ },
  10445		{ {0x40, 0x21071983} },
  10446		.frag_data = {
  10447			0x42, 0x00, 0x00, 0x00,
  10448			0x43, 0x44, 0x00, 0x00,
  10449			0x21, 0x07, 0x19, 0x83,
  10450		},
  10451	},
  10452	{
  10453		"LD_IND halfword mixed head/frag",
  10454		.u.insns = {
  10455			BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
  10456			BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x1),
  10457			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10458		},
  10459		CLASSIC | FLAG_SKB_FRAG,
  10460		{ [0x3e] = 0x25, [0x3f] = 0x05, },
  10461		{ {0x40, 0x0519} },
  10462		.frag_data = { 0x19, 0x82 },
  10463	},
  10464	{
  10465		"LD_IND word mixed head/frag",
  10466		.u.insns = {
  10467			BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
  10468			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x2),
  10469			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10470		},
  10471		CLASSIC | FLAG_SKB_FRAG,
  10472		{ [0x3e] = 0x25, [0x3f] = 0x05, },
  10473		{ {0x40, 0x25051982} },
  10474		.frag_data = { 0x19, 0x82 },
  10475	},
  10476	{
  10477		"LD_ABS byte frag",
  10478		.u.insns = {
  10479			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x40),
  10480			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10481		},
  10482		CLASSIC | FLAG_SKB_FRAG,
  10483		{ },
  10484		{ {0x40, 0x42} },
  10485		.frag_data = {
  10486			0x42, 0x00, 0x00, 0x00,
  10487			0x43, 0x44, 0x00, 0x00,
  10488			0x21, 0x07, 0x19, 0x83,
  10489		},
  10490	},
  10491	{
  10492		"LD_ABS halfword frag",
  10493		.u.insns = {
  10494			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x44),
  10495			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10496		},
  10497		CLASSIC | FLAG_SKB_FRAG,
  10498		{ },
  10499		{ {0x40, 0x4344} },
  10500		.frag_data = {
  10501			0x42, 0x00, 0x00, 0x00,
  10502			0x43, 0x44, 0x00, 0x00,
  10503			0x21, 0x07, 0x19, 0x83,
  10504		},
  10505	},
  10506	{
  10507		"LD_ABS word frag",
  10508		.u.insns = {
  10509			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x48),
  10510			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10511		},
  10512		CLASSIC | FLAG_SKB_FRAG,
  10513		{ },
  10514		{ {0x40, 0x21071983} },
  10515		.frag_data = {
  10516			0x42, 0x00, 0x00, 0x00,
  10517			0x43, 0x44, 0x00, 0x00,
  10518			0x21, 0x07, 0x19, 0x83,
  10519		},
  10520	},
  10521	{
  10522		"LD_ABS halfword mixed head/frag",
  10523		.u.insns = {
  10524			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3f),
  10525			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10526		},
  10527		CLASSIC | FLAG_SKB_FRAG,
  10528		{ [0x3e] = 0x25, [0x3f] = 0x05, },
  10529		{ {0x40, 0x0519} },
  10530		.frag_data = { 0x19, 0x82 },
  10531	},
  10532	{
  10533		"LD_ABS word mixed head/frag",
  10534		.u.insns = {
  10535			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3e),
  10536			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10537		},
  10538		CLASSIC | FLAG_SKB_FRAG,
  10539		{ [0x3e] = 0x25, [0x3f] = 0x05, },
  10540		{ {0x40, 0x25051982} },
  10541		.frag_data = { 0x19, 0x82 },
  10542	},
  10543	/*
  10544	 * LD_IND / LD_ABS on non fragmented SKBs
  10545	 */
  10546	{
  10547		/*
  10548		 * this tests that the JIT/interpreter correctly resets X
  10549		 * before using it in an LD_IND instruction.
  10550		 */
  10551		"LD_IND byte default X",
  10552		.u.insns = {
  10553			BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
  10554			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10555		},
  10556		CLASSIC,
  10557		{ [0x1] = 0x42 },
  10558		{ {0x40, 0x42 } },
  10559	},
  10560	{
  10561		"LD_IND byte positive offset",
  10562		.u.insns = {
  10563			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
  10564			BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
  10565			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10566		},
  10567		CLASSIC,
  10568		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  10569		{ {0x40, 0x82 } },
  10570	},
  10571	{
  10572		"LD_IND byte negative offset",
  10573		.u.insns = {
  10574			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
  10575			BPF_STMT(BPF_LD | BPF_IND | BPF_B, -0x1),
  10576			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10577		},
  10578		CLASSIC,
  10579		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  10580		{ {0x40, 0x05 } },
  10581	},
  10582	{
  10583		"LD_IND byte positive offset, all ff",
  10584		.u.insns = {
  10585			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
  10586			BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
  10587			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10588		},
  10589		CLASSIC,
  10590		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
  10591		{ {0x40, 0xff } },
  10592	},
  10593	{
  10594		"LD_IND byte positive offset, out of bounds",
  10595		.u.insns = {
  10596			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
  10597			BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
  10598			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10599		},
  10600		CLASSIC,
  10601		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  10602		{ {0x3f, 0 }, },
  10603	},
  10604	{
  10605		"LD_IND byte negative offset, out of bounds",
  10606		.u.insns = {
  10607			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
  10608			BPF_STMT(BPF_LD | BPF_IND | BPF_B, -0x3f),
  10609			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10610		},
  10611		CLASSIC,
  10612		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  10613		{ {0x3f, 0 } },
  10614	},
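       	/*
       	 * SKF_LL_OFF is the special negative offset base that makes
       	 * LD_ABS/LD_IND address the packet relative to the link-layer
       	 * header; the tests below use it to exercise the negative-offset
       	 * load path, including repeated loads in one program.
       	 */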
  10615	{
  10616		"LD_IND byte negative offset, multiple calls",
  10617		.u.insns = {
  10618			BPF_STMT(BPF_LDX | BPF_IMM, 0x3b),
  10619			BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 1),
  10620			BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 2),
  10621			BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 3),
  10622			BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 4),
  10623			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10624		},
  10625		CLASSIC,
  10626		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  10627		{ {0x40, 0x82 }, },
  10628	},
  10629	{
  10630		"LD_IND halfword positive offset",
  10631		.u.insns = {
  10632			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
  10633			BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x2),
  10634			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10635		},
  10636		CLASSIC,
  10637		{
  10638			[0x1c] = 0xaa, [0x1d] = 0x55,
  10639			[0x1e] = 0xbb, [0x1f] = 0x66,
  10640			[0x20] = 0xcc, [0x21] = 0x77,
  10641			[0x22] = 0xdd, [0x23] = 0x88,
  10642		},
  10643		{ {0x40, 0xdd88 } },
  10644	},
  10645	{
  10646		"LD_IND halfword negative offset",
  10647		.u.insns = {
  10648			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
  10649			BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x2),
  10650			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10651		},
  10652		CLASSIC,
  10653		{
  10654			[0x1c] = 0xaa, [0x1d] = 0x55,
  10655			[0x1e] = 0xbb, [0x1f] = 0x66,
  10656			[0x20] = 0xcc, [0x21] = 0x77,
  10657			[0x22] = 0xdd, [0x23] = 0x88,
  10658		},
  10659		{ {0x40, 0xbb66 } },
  10660	},
  10661	{
  10662		"LD_IND halfword unaligned",
  10663		.u.insns = {
  10664			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
  10665			BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x1),
  10666			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10667		},
  10668		CLASSIC,
  10669		{
  10670			[0x1c] = 0xaa, [0x1d] = 0x55,
  10671			[0x1e] = 0xbb, [0x1f] = 0x66,
  10672			[0x20] = 0xcc, [0x21] = 0x77,
  10673			[0x22] = 0xdd, [0x23] = 0x88,
  10674		},
  10675		{ {0x40, 0x66cc } },
  10676	},
  10677	{
  10678		"LD_IND halfword positive offset, all ff",
  10679		.u.insns = {
  10680			BPF_STMT(BPF_LDX | BPF_IMM, 0x3d),
  10681			BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x1),
  10682			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10683		},
  10684		CLASSIC,
  10685		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
  10686		{ {0x40, 0xffff } },
  10687	},
  10688	{
  10689		"LD_IND halfword positive offset, out of bounds",
  10690		.u.insns = {
  10691			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
  10692			BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x1),
  10693			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10694		},
  10695		CLASSIC,
  10696		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  10697		{ {0x3f, 0 }, },
  10698	},
  10699	{
  10700		"LD_IND halfword negative offset, out of bounds",
  10701		.u.insns = {
  10702			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
  10703			BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x3f),
  10704			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10705		},
  10706		CLASSIC,
  10707		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  10708		{ {0x3f, 0 } },
  10709	},
  10710	{
  10711		"LD_IND word positive offset",
  10712		.u.insns = {
  10713			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
  10714			BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x4),
  10715			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10716		},
  10717		CLASSIC,
  10718		{
  10719			[0x1c] = 0xaa, [0x1d] = 0x55,
  10720			[0x1e] = 0xbb, [0x1f] = 0x66,
  10721			[0x20] = 0xcc, [0x21] = 0x77,
  10722			[0x22] = 0xdd, [0x23] = 0x88,
  10723			[0x24] = 0xee, [0x25] = 0x99,
  10724			[0x26] = 0xff, [0x27] = 0xaa,
  10725		},
  10726		{ {0x40, 0xee99ffaa } },
  10727	},
  10728	{
  10729		"LD_IND word negative offset",
  10730		.u.insns = {
  10731			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
  10732			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x4),
  10733			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10734		},
  10735		CLASSIC,
  10736		{
  10737			[0x1c] = 0xaa, [0x1d] = 0x55,
  10738			[0x1e] = 0xbb, [0x1f] = 0x66,
  10739			[0x20] = 0xcc, [0x21] = 0x77,
  10740			[0x22] = 0xdd, [0x23] = 0x88,
  10741			[0x24] = 0xee, [0x25] = 0x99,
  10742			[0x26] = 0xff, [0x27] = 0xaa,
  10743		},
  10744		{ {0x40, 0xaa55bb66 } },
  10745	},
  10746	{
  10747		"LD_IND word unaligned (addr & 3 == 2)",
  10748		.u.insns = {
  10749			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
  10750			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x2),
  10751			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10752		},
  10753		CLASSIC,
  10754		{
  10755			[0x1c] = 0xaa, [0x1d] = 0x55,
  10756			[0x1e] = 0xbb, [0x1f] = 0x66,
  10757			[0x20] = 0xcc, [0x21] = 0x77,
  10758			[0x22] = 0xdd, [0x23] = 0x88,
  10759			[0x24] = 0xee, [0x25] = 0x99,
  10760			[0x26] = 0xff, [0x27] = 0xaa,
  10761		},
  10762		{ {0x40, 0xbb66cc77 } },
  10763	},
  10764	{
  10765		"LD_IND word unaligned (addr & 3 == 1)",
  10766		.u.insns = {
  10767			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
  10768			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x3),
  10769			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10770		},
  10771		CLASSIC,
  10772		{
  10773			[0x1c] = 0xaa, [0x1d] = 0x55,
  10774			[0x1e] = 0xbb, [0x1f] = 0x66,
  10775			[0x20] = 0xcc, [0x21] = 0x77,
  10776			[0x22] = 0xdd, [0x23] = 0x88,
  10777			[0x24] = 0xee, [0x25] = 0x99,
  10778			[0x26] = 0xff, [0x27] = 0xaa,
  10779		},
  10780		{ {0x40, 0x55bb66cc } },
  10781	},
  10782	{
  10783		"LD_IND word unaligned (addr & 3 == 3)",
  10784		.u.insns = {
  10785			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
  10786			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x1),
  10787			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10788		},
  10789		CLASSIC,
  10790		{
  10791			[0x1c] = 0xaa, [0x1d] = 0x55,
  10792			[0x1e] = 0xbb, [0x1f] = 0x66,
  10793			[0x20] = 0xcc, [0x21] = 0x77,
  10794			[0x22] = 0xdd, [0x23] = 0x88,
  10795			[0x24] = 0xee, [0x25] = 0x99,
  10796			[0x26] = 0xff, [0x27] = 0xaa,
  10797		},
  10798		{ {0x40, 0x66cc77dd } },
  10799	},
  10800	{
  10801		"LD_IND word positive offset, all ff",
  10802		.u.insns = {
  10803			BPF_STMT(BPF_LDX | BPF_IMM, 0x3b),
  10804			BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x1),
  10805			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10806		},
  10807		CLASSIC,
  10808		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
  10809		{ {0x40, 0xffffffff } },
  10810	},
  10811	{
  10812		"LD_IND word positive offset, out of bounds",
  10813		.u.insns = {
  10814			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
  10815			BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x1),
  10816			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10817		},
  10818		CLASSIC,
  10819		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  10820		{ {0x3f, 0 }, },
  10821	},
  10822	{
  10823		"LD_IND word negative offset, out of bounds",
  10824		.u.insns = {
  10825			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
  10826			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x3f),
  10827			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10828		},
  10829		CLASSIC,
  10830		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  10831		{ {0x3f, 0 } },
  10832	},
  10833	{
  10834		"LD_ABS byte",
  10835		.u.insns = {
  10836			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x20),
  10837			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10838		},
  10839		CLASSIC,
  10840		{
  10841			[0x1c] = 0xaa, [0x1d] = 0x55,
  10842			[0x1e] = 0xbb, [0x1f] = 0x66,
  10843			[0x20] = 0xcc, [0x21] = 0x77,
  10844			[0x22] = 0xdd, [0x23] = 0x88,
  10845			[0x24] = 0xee, [0x25] = 0x99,
  10846			[0x26] = 0xff, [0x27] = 0xaa,
  10847		},
  10848		{ {0x40, 0xcc } },
  10849	},
  10850	{
  10851		"LD_ABS byte positive offset, all ff",
  10852		.u.insns = {
  10853			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x3f),
  10854			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10855		},
  10856		CLASSIC,
  10857		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
  10858		{ {0x40, 0xff } },
  10859	},
  10860	{
  10861		"LD_ABS byte positive offset, out of bounds",
  10862		.u.insns = {
  10863			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x3f),
  10864			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10865		},
  10866		CLASSIC,
  10867		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  10868		{ {0x3f, 0 }, },
  10869	},
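       	/*
       	 * A plain negative absolute offset, i.e. one not based on an
       	 * SKF_*_OFF constant, is expected to be rejected with -EINVAL
       	 * rather than executed, hence FLAG_EXPECTED_FAIL below.
       	 */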
  10870	{
  10871		"LD_ABS byte negative offset, out of bounds load",
  10872		.u.insns = {
  10873			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, -1),
  10874			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10875		},
  10876		CLASSIC | FLAG_EXPECTED_FAIL,
  10877		.expected_errcode = -EINVAL,
  10878	},
  10879	{
  10880		"LD_ABS byte negative offset, in bounds",
  10881		.u.insns = {
  10882			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
  10883			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10884		},
  10885		CLASSIC,
  10886		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  10887		{ {0x40, 0x82 }, },
  10888	},
  10889	{
  10890		"LD_ABS byte negative offset, out of bounds",
  10891		.u.insns = {
  10892			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
  10893			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10894		},
  10895		CLASSIC,
  10896		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  10897		{ {0x3f, 0 }, },
  10898	},
  10899	{
  10900		"LD_ABS byte negative offset, multiple calls",
  10901		.u.insns = {
  10902			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3c),
  10903			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3d),
  10904			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3e),
  10905			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
  10906			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10907		},
  10908		CLASSIC,
  10909		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  10910		{ {0x40, 0x82 }, },
  10911	},
  10912	{
  10913		"LD_ABS halfword",
  10914		.u.insns = {
  10915			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x22),
  10916			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10917		},
  10918		CLASSIC,
  10919		{
  10920			[0x1c] = 0xaa, [0x1d] = 0x55,
  10921			[0x1e] = 0xbb, [0x1f] = 0x66,
  10922			[0x20] = 0xcc, [0x21] = 0x77,
  10923			[0x22] = 0xdd, [0x23] = 0x88,
  10924			[0x24] = 0xee, [0x25] = 0x99,
  10925			[0x26] = 0xff, [0x27] = 0xaa,
  10926		},
  10927		{ {0x40, 0xdd88 } },
  10928	},
  10929	{
  10930		"LD_ABS halfword unaligned",
  10931		.u.insns = {
  10932			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x25),
  10933			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10934		},
  10935		CLASSIC,
  10936		{
  10937			[0x1c] = 0xaa, [0x1d] = 0x55,
  10938			[0x1e] = 0xbb, [0x1f] = 0x66,
  10939			[0x20] = 0xcc, [0x21] = 0x77,
  10940			[0x22] = 0xdd, [0x23] = 0x88,
  10941			[0x24] = 0xee, [0x25] = 0x99,
  10942			[0x26] = 0xff, [0x27] = 0xaa,
  10943		},
  10944		{ {0x40, 0x99ff } },
  10945	},
  10946	{
  10947		"LD_ABS halfword positive offset, all ff",
  10948		.u.insns = {
  10949			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3e),
  10950			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10951		},
  10952		CLASSIC,
  10953		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
  10954		{ {0x40, 0xffff } },
  10955	},
  10956	{
  10957		"LD_ABS halfword positive offset, out of bounds",
  10958		.u.insns = {
  10959			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3f),
  10960			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10961		},
  10962		CLASSIC,
  10963		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  10964		{ {0x3f, 0 }, },
  10965	},
  10966	{
  10967		"LD_ABS halfword negative offset, out of bounds load",
  10968		.u.insns = {
  10969			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, -1),
  10970			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10971		},
  10972		CLASSIC | FLAG_EXPECTED_FAIL,
  10973		.expected_errcode = -EINVAL,
  10974	},
  10975	{
  10976		"LD_ABS halfword negative offset, in bounds",
  10977		.u.insns = {
  10978			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, SKF_LL_OFF + 0x3e),
  10979			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10980		},
  10981		CLASSIC,
  10982		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  10983		{ {0x40, 0x1982 }, },
  10984	},
  10985	{
  10986		"LD_ABS halfword negative offset, out of bounds",
  10987		.u.insns = {
  10988			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, SKF_LL_OFF + 0x3e),
  10989			BPF_STMT(BPF_RET | BPF_A, 0x0),
  10990		},
  10991		CLASSIC,
  10992		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  10993		{ {0x3f, 0 }, },
  10994	},
  10995	{
  10996		"LD_ABS word",
  10997		.u.insns = {
  10998			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x1c),
  10999			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11000		},
  11001		CLASSIC,
  11002		{
  11003			[0x1c] = 0xaa, [0x1d] = 0x55,
  11004			[0x1e] = 0xbb, [0x1f] = 0x66,
  11005			[0x20] = 0xcc, [0x21] = 0x77,
  11006			[0x22] = 0xdd, [0x23] = 0x88,
  11007			[0x24] = 0xee, [0x25] = 0x99,
  11008			[0x26] = 0xff, [0x27] = 0xaa,
  11009		},
  11010		{ {0x40, 0xaa55bb66 } },
  11011	},
  11012	{
  11013		"LD_ABS word unaligned (addr & 3 == 2)",
  11014		.u.insns = {
  11015			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x22),
  11016			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11017		},
  11018		CLASSIC,
  11019		{
  11020			[0x1c] = 0xaa, [0x1d] = 0x55,
  11021			[0x1e] = 0xbb, [0x1f] = 0x66,
  11022			[0x20] = 0xcc, [0x21] = 0x77,
  11023			[0x22] = 0xdd, [0x23] = 0x88,
  11024			[0x24] = 0xee, [0x25] = 0x99,
  11025			[0x26] = 0xff, [0x27] = 0xaa,
  11026		},
  11027		{ {0x40, 0xdd88ee99 } },
  11028	},
  11029	{
  11030		"LD_ABS word unaligned (addr & 3 == 1)",
  11031		.u.insns = {
  11032			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x21),
  11033			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11034		},
  11035		CLASSIC,
  11036		{
  11037			[0x1c] = 0xaa, [0x1d] = 0x55,
  11038			[0x1e] = 0xbb, [0x1f] = 0x66,
  11039			[0x20] = 0xcc, [0x21] = 0x77,
  11040			[0x22] = 0xdd, [0x23] = 0x88,
  11041			[0x24] = 0xee, [0x25] = 0x99,
  11042			[0x26] = 0xff, [0x27] = 0xaa,
  11043		},
  11044		{ {0x40, 0x77dd88ee } },
  11045	},
  11046	{
  11047		"LD_ABS word unaligned (addr & 3 == 3)",
  11048		.u.insns = {
  11049			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x23),
  11050			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11051		},
  11052		CLASSIC,
  11053		{
  11054			[0x1c] = 0xaa, [0x1d] = 0x55,
  11055			[0x1e] = 0xbb, [0x1f] = 0x66,
  11056			[0x20] = 0xcc, [0x21] = 0x77,
  11057			[0x22] = 0xdd, [0x23] = 0x88,
  11058			[0x24] = 0xee, [0x25] = 0x99,
  11059			[0x26] = 0xff, [0x27] = 0xaa,
  11060		},
  11061		{ {0x40, 0x88ee99ff } },
  11062	},
  11063	{
  11064		"LD_ABS word positive offset, all ff",
  11065		.u.insns = {
  11066			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3c),
  11067			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11068		},
  11069		CLASSIC,
  11070		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
  11071		{ {0x40, 0xffffffff } },
  11072	},
  11073	{
  11074		"LD_ABS word positive offset, out of bounds",
  11075		.u.insns = {
  11076			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3f),
  11077			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11078		},
  11079		CLASSIC,
  11080		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  11081		{ {0x3f, 0 }, },
  11082	},
  11083	{
  11084		"LD_ABS word negative offset, out of bounds load",
  11085		.u.insns = {
  11086			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, -1),
  11087			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11088		},
  11089		CLASSIC | FLAG_EXPECTED_FAIL,
  11090		.expected_errcode = -EINVAL,
  11091	},
  11092	{
  11093		"LD_ABS word negative offset, in bounds",
  11094		.u.insns = {
  11095			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, SKF_LL_OFF + 0x3c),
  11096			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11097		},
  11098		CLASSIC,
  11099		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  11100		{ {0x40, 0x25051982 }, },
  11101	},
  11102	{
  11103		"LD_ABS word negative offset, out of bounds",
  11104		.u.insns = {
  11105			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, SKF_LL_OFF + 0x3c),
  11106			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11107		},
  11108		CLASSIC,
  11109		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  11110		{ {0x3f, 0 }, },
  11111	},
  11112	{
  11113		"LDX_MSH standalone, preserved A",
  11114		.u.insns = {
  11115			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
  11116			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
  11117			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11118		},
  11119		CLASSIC,
  11120		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  11121		{ {0x40, 0xffeebbaa }, },
  11122	},
  11123	{
  11124		"LDX_MSH standalone, preserved A 2",
  11125		.u.insns = {
  11126			BPF_STMT(BPF_LD | BPF_IMM, 0x175e9d63),
  11127			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
  11128			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3d),
  11129			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3e),
  11130			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3f),
  11131			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11132		},
  11133		CLASSIC,
  11134		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  11135		{ {0x40, 0x175e9d63 }, },
  11136	},
  11137	{
  11138		"LDX_MSH standalone, test result 1",
  11139		.u.insns = {
  11140			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
  11141			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
  11142			BPF_STMT(BPF_MISC | BPF_TXA, 0),
  11143			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11144		},
  11145		CLASSIC,
  11146		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  11147		{ {0x40, 0x14 }, },
  11148	},
  11149	{
  11150		"LDX_MSH standalone, test result 2",
  11151		.u.insns = {
  11152			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
  11153			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3e),
  11154			BPF_STMT(BPF_MISC | BPF_TXA, 0),
  11155			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11156		},
  11157		CLASSIC,
  11158		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  11159		{ {0x40, 0x24 }, },
  11160	},
  11161	{
  11162		"LDX_MSH standalone, negative offset",
  11163		.u.insns = {
  11164			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
  11165			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, -1),
  11166			BPF_STMT(BPF_MISC | BPF_TXA, 0),
  11167			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11168		},
  11169		CLASSIC,
  11170		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  11171		{ {0x40, 0 }, },
  11172	},
  11173	{
  11174		"LDX_MSH standalone, negative offset 2",
  11175		.u.insns = {
  11176			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
  11177			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, SKF_LL_OFF + 0x3e),
  11178			BPF_STMT(BPF_MISC | BPF_TXA, 0),
  11179			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11180		},
  11181		CLASSIC,
  11182		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  11183		{ {0x40, 0x24 }, },
  11184	},
  11185	{
  11186		"LDX_MSH standalone, out of bounds",
  11187		.u.insns = {
  11188			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
  11189			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x40),
  11190			BPF_STMT(BPF_MISC | BPF_TXA, 0),
  11191			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11192		},
  11193		CLASSIC,
  11194		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
  11195		{ {0x40, 0 }, },
  11196	},
  11197	/*
  11198	 * verify that the interpreter or JIT correctly sets A and X
  11199	 * to 0.
  11200	 */
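	/*
	 * Classic BPF guarantees that both A and X are zero before the first
	 * instruction executes. The tests below rely on that: for instance,
	 * "ADD default A" is expected to return 0 + 0x42 = 0x42 and
	 * "MUL default X" to return 0x42 * 0 = 0, while the DIV/MOD variants
	 * divide by the default X = 0 and therefore abort, returning 0.
	 */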
  11201	{
  11202		"ADD default X",
  11203		.u.insns = {
  11204			/*
  11205			 * A = 0x42
  11206			 * A = A + X
  11207			 * ret A
  11208			 */
  11209			BPF_STMT(BPF_LD | BPF_IMM, 0x42),
  11210			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
  11211			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11212		},
  11213		CLASSIC | FLAG_NO_DATA,
  11214		{},
  11215		{ {0x1, 0x42 } },
  11216	},
  11217	{
  11218		"ADD default A",
  11219		.u.insns = {
  11220			/*
  11221			 * A = A + 0x42
  11222			 * ret A
  11223			 */
  11224			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0x42),
  11225			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11226		},
  11227		CLASSIC | FLAG_NO_DATA,
  11228		{},
  11229		{ {0x1, 0x42 } },
  11230	},
  11231	{
  11232		"SUB default X",
  11233		.u.insns = {
  11234			/*
  11235			 * A = 0x66
  11236			 * A = A - X
  11237			 * ret A
  11238			 */
  11239			BPF_STMT(BPF_LD | BPF_IMM, 0x66),
  11240			BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
  11241			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11242		},
  11243		CLASSIC | FLAG_NO_DATA,
  11244		{},
  11245		{ {0x1, 0x66 } },
  11246	},
  11247	{
  11248		"SUB default A",
  11249		.u.insns = {
  11250			/*
  11251			 * A = A - -0x66
  11252			 * ret A
  11253			 */
  11254			BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, -0x66),
  11255			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11256		},
  11257		CLASSIC | FLAG_NO_DATA,
  11258		{},
  11259		{ {0x1, 0x66 } },
  11260	},
  11261	{
  11262		"MUL default X",
  11263		.u.insns = {
  11264			/*
  11265			 * A = 0x42
  11266			 * A = A * X
  11267			 * ret A
  11268			 */
  11269			BPF_STMT(BPF_LD | BPF_IMM, 0x42),
  11270			BPF_STMT(BPF_ALU | BPF_MUL | BPF_X, 0),
  11271			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11272		},
  11273		CLASSIC | FLAG_NO_DATA,
  11274		{},
  11275		{ {0x1, 0x0 } },
  11276	},
  11277	{
  11278		"MUL default A",
  11279		.u.insns = {
  11280			/*
  11281			 * A = A * 0x66
  11282			 * ret A
  11283			 */
  11284			BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 0x66),
  11285			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11286		},
  11287		CLASSIC | FLAG_NO_DATA,
  11288		{},
  11289		{ {0x1, 0x0 } },
  11290	},
  11291	{
  11292		"DIV default X",
  11293		.u.insns = {
  11294			/*
  11295			 * A = 0x42
  11296			 * A = A / X ; this halts the filter execution if X is 0
  11297			 * ret 0x42
  11298			 */
  11299			BPF_STMT(BPF_LD | BPF_IMM, 0x42),
  11300			BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
  11301			BPF_STMT(BPF_RET | BPF_K, 0x42),
  11302		},
  11303		CLASSIC | FLAG_NO_DATA,
  11304		{},
  11305		{ {0x1, 0x0 } },
  11306	},
  11307	{
  11308		"DIV default A",
  11309		.u.insns = {
  11310			/*
  11311			 * A = A / 1
  11312			 * ret A
  11313			 */
  11314			BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x1),
  11315			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11316		},
  11317		CLASSIC | FLAG_NO_DATA,
  11318		{},
  11319		{ {0x1, 0x0 } },
  11320	},
  11321	{
  11322		"MOD default X",
  11323		.u.insns = {
  11324			/*
  11325			 * A = 0x42
  11326			 * A = A mod X ; this halts the filter execution if X is 0
  11327			 * ret 0x42
  11328			 */
  11329			BPF_STMT(BPF_LD | BPF_IMM, 0x42),
  11330			BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
  11331			BPF_STMT(BPF_RET | BPF_K, 0x42),
  11332		},
  11333		CLASSIC | FLAG_NO_DATA,
  11334		{},
  11335		{ {0x1, 0x0 } },
  11336	},
  11337	{
  11338		"MOD default A",
  11339		.u.insns = {
  11340			/*
  11341			 * A = A mod 1
  11342			 * ret A
  11343			 */
  11344			BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x1),
  11345			BPF_STMT(BPF_RET | BPF_A, 0x0),
  11346		},
  11347		CLASSIC | FLAG_NO_DATA,
  11348		{},
  11349		{ {0x1, 0x0 } },
  11350	},
  11351	{
  11352		"JMP EQ default A",
  11353		.u.insns = {
  11354			/*
  11355			 * cmp A, 0x0, 0, 1
  11356			 * ret 0x42
  11357			 * ret 0x66
  11358			 */
  11359			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0, 0, 1),
  11360			BPF_STMT(BPF_RET | BPF_K, 0x42),
  11361			BPF_STMT(BPF_RET | BPF_K, 0x66),
  11362		},
  11363		CLASSIC | FLAG_NO_DATA,
  11364		{},
  11365		{ {0x1, 0x42 } },
  11366	},
  11367	{
  11368		"JMP EQ default X",
  11369		.u.insns = {
  11370			/*
  11371			 * A = 0x0
  11372			 * cmp A, X, 0, 1
  11373			 * ret 0x42
  11374			 * ret 0x66
  11375			 */
  11376			BPF_STMT(BPF_LD | BPF_IMM, 0x0),
  11377			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0x0, 0, 1),
  11378			BPF_STMT(BPF_RET | BPF_K, 0x42),
  11379			BPF_STMT(BPF_RET | BPF_K, 0x66),
  11380		},
  11381		CLASSIC | FLAG_NO_DATA,
  11382		{},
  11383		{ {0x1, 0x42 } },
  11384	},
  11385	/* Checking interpreter vs JIT wrt signed extended imms. */
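	/*
	 * Background for the expected results: BPF_JMP_IMM sign-extends its
	 * 32-bit immediate to 64 bits, so -17104896 and 0xfefb0000 both
	 * become 0xfffffffffefb0000 as comparison values. The AND of two
	 * zero-extended 32-bit registers leaves 0x00000000fefb0000, so the
	 * JNE in tests 1 and 2 is taken (result 1), whereas LD_IMM64 of the
	 * same negative constant in test 4 matches exactly (result 2).
	 */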
  11386	{
  11387		"JNE signed compare, test 1",
  11388		.u.insns_int = {
  11389			BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
  11390			BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
  11391			BPF_MOV64_REG(R2, R1),
  11392			BPF_ALU64_REG(BPF_AND, R2, R3),
  11393			BPF_ALU32_IMM(BPF_MOV, R0, 1),
  11394			BPF_JMP_IMM(BPF_JNE, R2, -17104896, 1),
  11395			BPF_ALU32_IMM(BPF_MOV, R0, 2),
  11396			BPF_EXIT_INSN(),
  11397		},
  11398		INTERNAL,
  11399		{ },
  11400		{ { 0, 1 } },
  11401	},
  11402	{
  11403		"JNE signed compare, test 2",
  11404		.u.insns_int = {
  11405			BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
  11406			BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
  11407			BPF_MOV64_REG(R2, R1),
  11408			BPF_ALU64_REG(BPF_AND, R2, R3),
  11409			BPF_ALU32_IMM(BPF_MOV, R0, 1),
  11410			BPF_JMP_IMM(BPF_JNE, R2, 0xfefb0000, 1),
  11411			BPF_ALU32_IMM(BPF_MOV, R0, 2),
  11412			BPF_EXIT_INSN(),
  11413		},
  11414		INTERNAL,
  11415		{ },
  11416		{ { 0, 1 } },
  11417	},
  11418	{
  11419		"JNE signed compare, test 3",
  11420		.u.insns_int = {
  11421			BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
  11422			BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
  11423			BPF_ALU32_IMM(BPF_MOV, R4, 0xfefb0000),
  11424			BPF_MOV64_REG(R2, R1),
  11425			BPF_ALU64_REG(BPF_AND, R2, R3),
  11426			BPF_ALU32_IMM(BPF_MOV, R0, 1),
  11427			BPF_JMP_REG(BPF_JNE, R2, R4, 1),
  11428			BPF_ALU32_IMM(BPF_MOV, R0, 2),
  11429			BPF_EXIT_INSN(),
  11430		},
  11431		INTERNAL,
  11432		{ },
  11433		{ { 0, 2 } },
  11434	},
  11435	{
  11436		"JNE signed compare, test 4",
  11437		.u.insns_int = {
  11438			BPF_LD_IMM64(R1, -17104896),
  11439			BPF_ALU32_IMM(BPF_MOV, R0, 1),
  11440			BPF_JMP_IMM(BPF_JNE, R1, -17104896, 1),
  11441			BPF_ALU32_IMM(BPF_MOV, R0, 2),
  11442			BPF_EXIT_INSN(),
  11443		},
  11444		INTERNAL,
  11445		{ },
  11446		{ { 0, 2 } },
  11447	},
  11448	{
  11449		"JNE signed compare, test 5",
  11450		.u.insns_int = {
  11451			BPF_LD_IMM64(R1, 0xfefb0000),
  11452			BPF_ALU32_IMM(BPF_MOV, R0, 1),
  11453			BPF_JMP_IMM(BPF_JNE, R1, 0xfefb0000, 1),
  11454			BPF_ALU32_IMM(BPF_MOV, R0, 2),
  11455			BPF_EXIT_INSN(),
  11456		},
  11457		INTERNAL,
  11458		{ },
  11459		{ { 0, 1 } },
  11460	},
  11461	{
  11462		"JNE signed compare, test 6",
  11463		.u.insns_int = {
  11464			BPF_LD_IMM64(R1, 0x7efb0000),
  11465			BPF_ALU32_IMM(BPF_MOV, R0, 1),
  11466			BPF_JMP_IMM(BPF_JNE, R1, 0x7efb0000, 1),
  11467			BPF_ALU32_IMM(BPF_MOV, R0, 2),
  11468			BPF_EXIT_INSN(),
  11469		},
  11470		INTERNAL,
  11471		{ },
  11472		{ { 0, 2 } },
  11473	},
  11474	{
  11475		"JNE signed compare, test 7",
  11476		.u.insns = {
  11477			BPF_STMT(BPF_LD | BPF_IMM, 0xffff0000),
  11478			BPF_STMT(BPF_MISC | BPF_TAX, 0),
  11479			BPF_STMT(BPF_LD | BPF_IMM, 0xfefbbc12),
  11480			BPF_STMT(BPF_ALU | BPF_AND | BPF_X, 0),
  11481			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0xfefb0000, 1, 0),
  11482			BPF_STMT(BPF_RET | BPF_K, 1),
  11483			BPF_STMT(BPF_RET | BPF_K, 2),
  11484		},
  11485		CLASSIC | FLAG_NO_DATA,
  11486		{},
  11487		{ { 0, 2 } },
  11488	},
  11489	/* BPF_LDX_MEM with operand aliasing */
  11490	{
  11491		"LDX_MEM_B: operand register aliasing",
  11492		.u.insns_int = {
  11493			BPF_ST_MEM(BPF_B, R10, -8, 123),
  11494			BPF_MOV64_REG(R0, R10),
  11495			BPF_LDX_MEM(BPF_B, R0, R0, -8),
  11496			BPF_EXIT_INSN(),
  11497		},
  11498		INTERNAL,
  11499		{ },
  11500		{ { 0, 123 } },
  11501		.stack_depth = 8,
  11502	},
  11503	{
  11504		"LDX_MEM_H: operand register aliasing",
  11505		.u.insns_int = {
  11506			BPF_ST_MEM(BPF_H, R10, -8, 12345),
  11507			BPF_MOV64_REG(R0, R10),
  11508			BPF_LDX_MEM(BPF_H, R0, R0, -8),
  11509			BPF_EXIT_INSN(),
  11510		},
  11511		INTERNAL,
  11512		{ },
  11513		{ { 0, 12345 } },
  11514		.stack_depth = 8,
  11515	},
  11516	{
  11517		"LDX_MEM_W: operand register aliasing",
  11518		.u.insns_int = {
  11519			BPF_ST_MEM(BPF_W, R10, -8, 123456789),
  11520			BPF_MOV64_REG(R0, R10),
  11521			BPF_LDX_MEM(BPF_W, R0, R0, -8),
  11522			BPF_EXIT_INSN(),
  11523		},
  11524		INTERNAL,
  11525		{ },
  11526		{ { 0, 123456789 } },
  11527		.stack_depth = 8,
  11528	},
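	/*
	 * The 64-bit variant below cannot simply return the loaded value,
	 * since the test harness only checks the lower 32 bits of R0. The
	 * loaded value is therefore subtracted from the original R1 and the
	 * upper half of the difference is folded into the lower half
	 * (RSH 32 + OR), so the test returns 0 only if all 64 bits survived
	 * the aliased load. The same folding idiom is used by several of the
	 * zero-extension tests further down.
	 */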
  11529	{
  11530		"LDX_MEM_DW: operand register aliasing",
  11531		.u.insns_int = {
  11532			BPF_LD_IMM64(R1, 0x123456789abcdefULL),
  11533			BPF_STX_MEM(BPF_DW, R10, R1, -8),
  11534			BPF_MOV64_REG(R0, R10),
  11535			BPF_LDX_MEM(BPF_DW, R0, R0, -8),
  11536			BPF_ALU64_REG(BPF_SUB, R0, R1),
  11537			BPF_MOV64_REG(R1, R0),
  11538			BPF_ALU64_IMM(BPF_RSH, R1, 32),
  11539			BPF_ALU64_REG(BPF_OR, R0, R1),
  11540			BPF_EXIT_INSN(),
  11541		},
  11542		INTERNAL,
  11543		{ },
  11544		{ { 0, 0 } },
  11545		.stack_depth = 8,
  11546	},
  11547	/*
  11548	 * Register (non-)clobbering tests for the case where a JIT implements
  11549	 * complex ALU or ATOMIC operations via function calls. If so, the
  11550	 * function call must be transparent to the eBPF registers. The JIT
  11551	 * must therefore save and restore relevant registers across the call.
  11552	 * The following tests check that the eBPF registers retain their
  11553	 * values after such an operation. Mainly intended for complex ALU
  11554	 * and atomic operations, but we run it for all. You never know...
  11555	 *
  11556	 * Note that each operation should be tested twice with different
  11557	 * destinations, to check preservation for all registers.
  11558	 */
  11559#define BPF_TEST_CLOBBER_ALU(alu, op, dst, src)			\
  11560	{							\
  11561		#alu "_" #op " to " #dst ": no clobbering",	\
  11562		.u.insns_int = {				\
  11563			BPF_ALU64_IMM(BPF_MOV, R0, R0),		\
  11564			BPF_ALU64_IMM(BPF_MOV, R1, R1),		\
  11565			BPF_ALU64_IMM(BPF_MOV, R2, R2),		\
  11566			BPF_ALU64_IMM(BPF_MOV, R3, R3),		\
  11567			BPF_ALU64_IMM(BPF_MOV, R4, R4),		\
  11568			BPF_ALU64_IMM(BPF_MOV, R5, R5),		\
  11569			BPF_ALU64_IMM(BPF_MOV, R6, R6),		\
  11570			BPF_ALU64_IMM(BPF_MOV, R7, R7),		\
  11571			BPF_ALU64_IMM(BPF_MOV, R8, R8),		\
  11572			BPF_ALU64_IMM(BPF_MOV, R9, R9),		\
  11573			BPF_##alu(BPF_ ##op, dst, src),		\
  11574			BPF_ALU32_IMM(BPF_MOV, dst, dst),	\
  11575			BPF_JMP_IMM(BPF_JNE, R0, R0, 10),	\
  11576			BPF_JMP_IMM(BPF_JNE, R1, R1, 9),	\
  11577			BPF_JMP_IMM(BPF_JNE, R2, R2, 8),	\
  11578			BPF_JMP_IMM(BPF_JNE, R3, R3, 7),	\
  11579			BPF_JMP_IMM(BPF_JNE, R4, R4, 6),	\
  11580			BPF_JMP_IMM(BPF_JNE, R5, R5, 5),	\
  11581			BPF_JMP_IMM(BPF_JNE, R6, R6, 4),	\
  11582			BPF_JMP_IMM(BPF_JNE, R7, R7, 3),	\
  11583			BPF_JMP_IMM(BPF_JNE, R8, R8, 2),	\
  11584			BPF_JMP_IMM(BPF_JNE, R9, R9, 1),	\
  11585			BPF_ALU64_IMM(BPF_MOV, R0, 1),		\
  11586			BPF_EXIT_INSN(),			\
  11587		},						\
  11588		INTERNAL,					\
  11589		{ },						\
  11590		{ { 0, 1 } }					\
  11591	}
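	/*
	 * For example, BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R8, R1) (one of
	 * the instances below) expands to a program that seeds R0..R9 with
	 * the immediates 0..9 (the Rn macros are simply the register
	 * numbers), runs BPF_ALU64_REG(BPF_DIV, R8, R1), re-seeds the
	 * destination via BPF_ALU32_IMM(BPF_MOV, R8, R8), and then checks
	 * with BPF_JMP_IMM(BPF_JNE, Rn, Rn, ...) that every register still
	 * holds its seed. Any register clobbered by a helper call behind the
	 * JIT'ed operation skips the final R0 = 1 and fails the expected
	 * { 0, 1 } result.
	 */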
  11592	/* ALU64 operations, register clobbering */
  11593	BPF_TEST_CLOBBER_ALU(ALU64_IMM, AND, R8, 123456789),
  11594	BPF_TEST_CLOBBER_ALU(ALU64_IMM, AND, R9, 123456789),
  11595	BPF_TEST_CLOBBER_ALU(ALU64_IMM, OR, R8, 123456789),
  11596	BPF_TEST_CLOBBER_ALU(ALU64_IMM, OR, R9, 123456789),
  11597	BPF_TEST_CLOBBER_ALU(ALU64_IMM, XOR, R8, 123456789),
  11598	BPF_TEST_CLOBBER_ALU(ALU64_IMM, XOR, R9, 123456789),
  11599	BPF_TEST_CLOBBER_ALU(ALU64_IMM, LSH, R8, 12),
  11600	BPF_TEST_CLOBBER_ALU(ALU64_IMM, LSH, R9, 12),
  11601	BPF_TEST_CLOBBER_ALU(ALU64_IMM, RSH, R8, 12),
  11602	BPF_TEST_CLOBBER_ALU(ALU64_IMM, RSH, R9, 12),
  11603	BPF_TEST_CLOBBER_ALU(ALU64_IMM, ARSH, R8, 12),
  11604	BPF_TEST_CLOBBER_ALU(ALU64_IMM, ARSH, R9, 12),
  11605	BPF_TEST_CLOBBER_ALU(ALU64_IMM, ADD, R8, 123456789),
  11606	BPF_TEST_CLOBBER_ALU(ALU64_IMM, ADD, R9, 123456789),
  11607	BPF_TEST_CLOBBER_ALU(ALU64_IMM, SUB, R8, 123456789),
  11608	BPF_TEST_CLOBBER_ALU(ALU64_IMM, SUB, R9, 123456789),
  11609	BPF_TEST_CLOBBER_ALU(ALU64_IMM, MUL, R8, 123456789),
  11610	BPF_TEST_CLOBBER_ALU(ALU64_IMM, MUL, R9, 123456789),
  11611	BPF_TEST_CLOBBER_ALU(ALU64_IMM, DIV, R8, 123456789),
  11612	BPF_TEST_CLOBBER_ALU(ALU64_IMM, DIV, R9, 123456789),
  11613	BPF_TEST_CLOBBER_ALU(ALU64_IMM, MOD, R8, 123456789),
  11614	BPF_TEST_CLOBBER_ALU(ALU64_IMM, MOD, R9, 123456789),
  11615	/* ALU32 immediate operations, register clobbering */
  11616	BPF_TEST_CLOBBER_ALU(ALU32_IMM, AND, R8, 123456789),
  11617	BPF_TEST_CLOBBER_ALU(ALU32_IMM, AND, R9, 123456789),
  11618	BPF_TEST_CLOBBER_ALU(ALU32_IMM, OR, R8, 123456789),
  11619	BPF_TEST_CLOBBER_ALU(ALU32_IMM, OR, R9, 123456789),
  11620	BPF_TEST_CLOBBER_ALU(ALU32_IMM, XOR, R8, 123456789),
  11621	BPF_TEST_CLOBBER_ALU(ALU32_IMM, XOR, R9, 123456789),
  11622	BPF_TEST_CLOBBER_ALU(ALU32_IMM, LSH, R8, 12),
  11623	BPF_TEST_CLOBBER_ALU(ALU32_IMM, LSH, R9, 12),
  11624	BPF_TEST_CLOBBER_ALU(ALU32_IMM, RSH, R8, 12),
  11625	BPF_TEST_CLOBBER_ALU(ALU32_IMM, RSH, R9, 12),
  11626	BPF_TEST_CLOBBER_ALU(ALU32_IMM, ARSH, R8, 12),
  11627	BPF_TEST_CLOBBER_ALU(ALU32_IMM, ARSH, R9, 12),
  11628	BPF_TEST_CLOBBER_ALU(ALU32_IMM, ADD, R8, 123456789),
  11629	BPF_TEST_CLOBBER_ALU(ALU32_IMM, ADD, R9, 123456789),
  11630	BPF_TEST_CLOBBER_ALU(ALU32_IMM, SUB, R8, 123456789),
  11631	BPF_TEST_CLOBBER_ALU(ALU32_IMM, SUB, R9, 123456789),
  11632	BPF_TEST_CLOBBER_ALU(ALU32_IMM, MUL, R8, 123456789),
  11633	BPF_TEST_CLOBBER_ALU(ALU32_IMM, MUL, R9, 123456789),
  11634	BPF_TEST_CLOBBER_ALU(ALU32_IMM, DIV, R8, 123456789),
  11635	BPF_TEST_CLOBBER_ALU(ALU32_IMM, DIV, R9, 123456789),
  11636	BPF_TEST_CLOBBER_ALU(ALU32_IMM, MOD, R8, 123456789),
  11637	BPF_TEST_CLOBBER_ALU(ALU32_IMM, MOD, R9, 123456789),
  11638	/* ALU64 register operations, register clobbering */
  11639	BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R8, R1),
  11640	BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R9, R1),
  11641	BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R8, R1),
  11642	BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R9, R1),
  11643	BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R8, R1),
  11644	BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R9, R1),
  11645	BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R8, R1),
  11646	BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R9, R1),
  11647	BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R8, R1),
  11648	BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R9, R1),
  11649	BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R8, R1),
  11650	BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R9, R1),
  11651	BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R8, R1),
  11652	BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R9, R1),
  11653	BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R8, R1),
  11654	BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R9, R1),
  11655	BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R8, R1),
  11656	BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R9, R1),
  11657	BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R8, R1),
  11658	BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R9, R1),
  11659	BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R8, R1),
  11660	BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R9, R1),
  11661	/* ALU32 register operations, register clobbering */
  11662	BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R8, R1),
  11663	BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R9, R1),
  11664	BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R8, R1),
  11665	BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R9, R1),
  11666	BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R8, R1),
  11667	BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R9, R1),
  11668	BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R8, R1),
  11669	BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R9, R1),
  11670	BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R8, R1),
  11671	BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R9, R1),
  11672	BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R8, R1),
  11673	BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R9, R1),
  11674	BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R8, R1),
  11675	BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R9, R1),
  11676	BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R8, R1),
  11677	BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R9, R1),
  11678	BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R8, R1),
  11679	BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R9, R1),
  11680	BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R8, R1),
  11681	BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R9, R1),
  11682	BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R8, R1),
  11683	BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R9, R1),
  11684#undef BPF_TEST_CLOBBER_ALU
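	/*
	 * Same idea for atomic operations. The initial value stored at
	 * -8(R10) is picked so that the register checks still hold after the
	 * atomic op: fetch-style operations (including XCHG) write the old
	 * memory value back into the source register, so memory starts at 1
	 * to keep R1 == 1; CMPXCHG compares memory against R0 and leaves the
	 * old value in R0, so memory starts at 0 to keep R0 == 0.
	 */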
  11685#define BPF_TEST_CLOBBER_ATOMIC(width, op)			\
  11686	{							\
  11687		"Atomic_" #width " " #op ": no clobbering",	\
  11688		.u.insns_int = {				\
  11689			BPF_ALU64_IMM(BPF_MOV, R0, 0),		\
  11690			BPF_ALU64_IMM(BPF_MOV, R1, 1),		\
  11691			BPF_ALU64_IMM(BPF_MOV, R2, 2),		\
  11692			BPF_ALU64_IMM(BPF_MOV, R3, 3),		\
  11693			BPF_ALU64_IMM(BPF_MOV, R4, 4),		\
  11694			BPF_ALU64_IMM(BPF_MOV, R5, 5),		\
  11695			BPF_ALU64_IMM(BPF_MOV, R6, 6),		\
  11696			BPF_ALU64_IMM(BPF_MOV, R7, 7),		\
  11697			BPF_ALU64_IMM(BPF_MOV, R8, 8),		\
  11698			BPF_ALU64_IMM(BPF_MOV, R9, 9),		\
  11699			BPF_ST_MEM(width, R10, -8,		\
  11700				   (op) == BPF_CMPXCHG ? 0 :	\
  11701				   (op) & BPF_FETCH ? 1 : 0),	\
  11702			BPF_ATOMIC_OP(width, op, R10, R1, -8),	\
  11703			BPF_JMP_IMM(BPF_JNE, R0, 0, 10),	\
  11704			BPF_JMP_IMM(BPF_JNE, R1, 1, 9),		\
  11705			BPF_JMP_IMM(BPF_JNE, R2, 2, 8),		\
  11706			BPF_JMP_IMM(BPF_JNE, R3, 3, 7),		\
  11707			BPF_JMP_IMM(BPF_JNE, R4, 4, 6),		\
  11708			BPF_JMP_IMM(BPF_JNE, R5, 5, 5),		\
  11709			BPF_JMP_IMM(BPF_JNE, R6, 6, 4),		\
  11710			BPF_JMP_IMM(BPF_JNE, R7, 7, 3),		\
  11711			BPF_JMP_IMM(BPF_JNE, R8, 8, 2),		\
  11712			BPF_JMP_IMM(BPF_JNE, R9, 9, 1),		\
  11713			BPF_ALU64_IMM(BPF_MOV, R0, 1),		\
  11714			BPF_EXIT_INSN(),			\
  11715		},						\
  11716		INTERNAL,					\
  11717		{ },						\
  11718		{ { 0, 1 } },					\
  11719		.stack_depth = 8,				\
  11720	}
  11721	/* 64-bit atomic operations, register clobbering */
  11722	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_ADD),
  11723	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_AND),
  11724	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_OR),
  11725	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XOR),
  11726	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_ADD | BPF_FETCH),
  11727	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_AND | BPF_FETCH),
  11728	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_OR | BPF_FETCH),
  11729	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XOR | BPF_FETCH),
  11730	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XCHG),
  11731	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_CMPXCHG),
  11732	/* 32-bit atomic operations, register clobbering */
  11733	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_ADD),
  11734	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_AND),
  11735	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_OR),
  11736	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XOR),
  11737	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_ADD | BPF_FETCH),
  11738	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_AND | BPF_FETCH),
  11739	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_OR | BPF_FETCH),
  11740	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XOR | BPF_FETCH),
  11741	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XCHG),
  11742	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_CMPXCHG),
  11743#undef BPF_TEST_CLOBBER_ATOMIC
  11744	/* Checking that ALU32 src is not zero extended in place */
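	/*
	 * A JIT that needs the zero-extended 32-bit value of the source
	 * register must not perform that zero-extension in the source
	 * register itself, as that would corrupt its upper 32 bits. R0 keeps
	 * a copy of the full 64-bit source value, the 32-bit operation
	 * writes its result to R2, and the folded difference R0 - R1 must
	 * come out as 0 if R1 was left intact.
	 */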
  11745#define BPF_ALU32_SRC_ZEXT(op)					\
  11746	{							\
  11747		"ALU32_" #op "_X: src preserved in zext",	\
  11748		.u.insns_int = {				\
  11749			BPF_LD_IMM64(R1, 0x0123456789acbdefULL),\
  11750			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),\
  11751			BPF_ALU64_REG(BPF_MOV, R0, R1),		\
  11752			BPF_ALU32_REG(BPF_##op, R2, R1),	\
  11753			BPF_ALU64_REG(BPF_SUB, R0, R1),		\
  11754			BPF_ALU64_REG(BPF_MOV, R1, R0),		\
  11755			BPF_ALU64_IMM(BPF_RSH, R1, 32),		\
  11756			BPF_ALU64_REG(BPF_OR, R0, R1),		\
  11757			BPF_EXIT_INSN(),			\
  11758		},						\
  11759		INTERNAL,					\
  11760		{ },						\
  11761		{ { 0, 0 } },					\
  11762	}
  11763	BPF_ALU32_SRC_ZEXT(MOV),
  11764	BPF_ALU32_SRC_ZEXT(AND),
  11765	BPF_ALU32_SRC_ZEXT(OR),
  11766	BPF_ALU32_SRC_ZEXT(XOR),
  11767	BPF_ALU32_SRC_ZEXT(ADD),
  11768	BPF_ALU32_SRC_ZEXT(SUB),
  11769	BPF_ALU32_SRC_ZEXT(MUL),
  11770	BPF_ALU32_SRC_ZEXT(DIV),
  11771	BPF_ALU32_SRC_ZEXT(MOD),
  11772#undef BPF_ALU32_SRC_ZEXT
  11773	/* Checking that ATOMIC32 src is not zero extended in place */
  11774#define BPF_ATOMIC32_SRC_ZEXT(op)					\
  11775	{								\
  11776		"ATOMIC_W_" #op ": src preserved in zext",		\
  11777		.u.insns_int = {					\
  11778			BPF_LD_IMM64(R0, 0x0123456789acbdefULL),	\
  11779			BPF_ALU64_REG(BPF_MOV, R1, R0),			\
  11780			BPF_ST_MEM(BPF_W, R10, -4, 0),			\
  11781			BPF_ATOMIC_OP(BPF_W, BPF_##op, R10, R1, -4),	\
  11782			BPF_ALU64_REG(BPF_SUB, R0, R1),			\
  11783			BPF_ALU64_REG(BPF_MOV, R1, R0),			\
  11784			BPF_ALU64_IMM(BPF_RSH, R1, 32),			\
  11785			BPF_ALU64_REG(BPF_OR, R0, R1),			\
  11786			BPF_EXIT_INSN(),				\
  11787		},							\
  11788		INTERNAL,						\
  11789		{ },							\
  11790		{ { 0, 0 } },						\
  11791		.stack_depth = 8,					\
  11792	}
  11793	BPF_ATOMIC32_SRC_ZEXT(ADD),
  11794	BPF_ATOMIC32_SRC_ZEXT(AND),
  11795	BPF_ATOMIC32_SRC_ZEXT(OR),
  11796	BPF_ATOMIC32_SRC_ZEXT(XOR),
  11797#undef BPF_ATOMIC32_SRC_ZEXT
  11798	/* Checking that CMPXCHG32 src is not zero extended in place */
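	/*
	 * BPF_CMPXCHG cannot reuse the macro above because it implicitly
	 * uses R0 as the comparand and overwrites it with the old memory
	 * value. Here the source R1 is compared against its copy in R2
	 * instead, and the folded difference is moved into R0 just before
	 * exiting.
	 */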
  11799	{
  11800		"ATOMIC_W_CMPXCHG: src preserved in zext",
  11801		.u.insns_int = {
  11802			BPF_LD_IMM64(R1, 0x0123456789acbdefULL),
  11803			BPF_ALU64_REG(BPF_MOV, R2, R1),
  11804			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  11805			BPF_ST_MEM(BPF_W, R10, -4, 0),
  11806			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R1, -4),
  11807			BPF_ALU64_REG(BPF_SUB, R1, R2),
  11808			BPF_ALU64_REG(BPF_MOV, R2, R1),
  11809			BPF_ALU64_IMM(BPF_RSH, R2, 32),
  11810			BPF_ALU64_REG(BPF_OR, R1, R2),
  11811			BPF_ALU64_REG(BPF_MOV, R0, R1),
  11812			BPF_EXIT_INSN(),
  11813		},
  11814		INTERNAL,
  11815		{ },
  11816		{ { 0, 0 } },
  11817		.stack_depth = 8,
  11818	},
  11819	/* Checking that JMP32 immediate src is not zero extended in place */
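	/*
	 * The conditional jump below targets the same fall-through path
	 * whether or not it is taken (the taken branch merely skips the
	 * BPF_JMP_A(0) nop), so the return value depends only on whether the
	 * full 64-bit operand value survived the 32-bit compare.
	 */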
  11820#define BPF_JMP32_IMM_ZEXT(op)					\
  11821	{							\
  11822		"JMP32_" #op "_K: operand preserved in zext",	\
  11823		.u.insns_int = {				\
  11824			BPF_LD_IMM64(R0, 0x0123456789acbdefULL),\
  11825			BPF_ALU64_REG(BPF_MOV, R1, R0),		\
  11826			BPF_JMP32_IMM(BPF_##op, R0, 1234, 1),	\
  11827			BPF_JMP_A(0), /* Nop */			\
  11828			BPF_ALU64_REG(BPF_SUB, R0, R1),		\
  11829			BPF_ALU64_REG(BPF_MOV, R1, R0),		\
  11830			BPF_ALU64_IMM(BPF_RSH, R1, 32),		\
  11831			BPF_ALU64_REG(BPF_OR, R0, R1),		\
  11832			BPF_EXIT_INSN(),			\
  11833		},						\
  11834		INTERNAL,					\
  11835		{ },						\
  11836		{ { 0, 0 } },					\
  11837	}
  11838	BPF_JMP32_IMM_ZEXT(JEQ),
  11839	BPF_JMP32_IMM_ZEXT(JNE),
  11840	BPF_JMP32_IMM_ZEXT(JSET),
  11841	BPF_JMP32_IMM_ZEXT(JGT),
  11842	BPF_JMP32_IMM_ZEXT(JGE),
  11843	BPF_JMP32_IMM_ZEXT(JLT),
  11844	BPF_JMP32_IMM_ZEXT(JLE),
  11845	BPF_JMP32_IMM_ZEXT(JSGT),
  11846	BPF_JMP32_IMM_ZEXT(JSGE),
  11847	BPF_JMP32_IMM_ZEXT(JSGT),
  11848	BPF_JMP32_IMM_ZEXT(JSLT),
  11849	BPF_JMP32_IMM_ZEXT(JSLE),
  11850#undef BPF_JMP32_IMM_ZEXT
  11851	/* Checking that JMP32 dst & src are not zero extended in place */
  11852#define BPF_JMP32_REG_ZEXT(op)					\
  11853	{							\
  11854		"JMP32_" #op "_X: operands preserved in zext",	\
  11855		.u.insns_int = {				\
  11856			BPF_LD_IMM64(R0, 0x0123456789acbdefULL),\
  11857			BPF_LD_IMM64(R1, 0xfedcba9876543210ULL),\
  11858			BPF_ALU64_REG(BPF_MOV, R2, R0),		\
  11859			BPF_ALU64_REG(BPF_MOV, R3, R1),		\
  11860			BPF_JMP32_REG(BPF_##op, R0, R1, 1),	\
  11861			BPF_JMP_A(0), /* Nop */			\
  11862			BPF_ALU64_REG(BPF_SUB, R0, R2),		\
  11863			BPF_ALU64_REG(BPF_SUB, R1, R3),		\
  11864			BPF_ALU64_REG(BPF_OR, R0, R1),		\
  11865			BPF_ALU64_REG(BPF_MOV, R1, R0),		\
  11866			BPF_ALU64_IMM(BPF_RSH, R1, 32),		\
  11867			BPF_ALU64_REG(BPF_OR, R0, R1),		\
  11868			BPF_EXIT_INSN(),			\
  11869		},						\
  11870		INTERNAL,					\
  11871		{ },						\
  11872		{ { 0, 0 } },					\
  11873	}
  11874	BPF_JMP32_REG_ZEXT(JEQ),
  11875	BPF_JMP32_REG_ZEXT(JNE),
  11876	BPF_JMP32_REG_ZEXT(JSET),
  11877	BPF_JMP32_REG_ZEXT(JGT),
  11878	BPF_JMP32_REG_ZEXT(JGE),
  11879	BPF_JMP32_REG_ZEXT(JLT),
  11880	BPF_JMP32_REG_ZEXT(JLE),
  11881	BPF_JMP32_REG_ZEXT(JSGT),
  11882	BPF_JMP32_REG_ZEXT(JSGE),
  11883	BPF_JMP32_REG_ZEXT(JSGT),
  11884	BPF_JMP32_REG_ZEXT(JSLT),
  11885	BPF_JMP32_REG_ZEXT(JSLE),
  11886#undef BPF_JMP32_REG_ZEXT
  11887	/* ALU64 K register combinations */
  11888	{
  11889		"ALU64_MOV_K: registers",
  11890		{ },
  11891		INTERNAL,
  11892		{ },
  11893		{ { 0, 1 } },
  11894		.fill_helper = bpf_fill_alu64_mov_imm_regs,
  11895	},
  11896	{
  11897		"ALU64_AND_K: registers",
  11898		{ },
  11899		INTERNAL,
  11900		{ },
  11901		{ { 0, 1 } },
  11902		.fill_helper = bpf_fill_alu64_and_imm_regs,
  11903	},
  11904	{
  11905		"ALU64_OR_K: registers",
  11906		{ },
  11907		INTERNAL,
  11908		{ },
  11909		{ { 0, 1 } },
  11910		.fill_helper = bpf_fill_alu64_or_imm_regs,
  11911	},
  11912	{
  11913		"ALU64_XOR_K: registers",
  11914		{ },
  11915		INTERNAL,
  11916		{ },
  11917		{ { 0, 1 } },
  11918		.fill_helper = bpf_fill_alu64_xor_imm_regs,
  11919	},
  11920	{
  11921		"ALU64_LSH_K: registers",
  11922		{ },
  11923		INTERNAL,
  11924		{ },
  11925		{ { 0, 1 } },
  11926		.fill_helper = bpf_fill_alu64_lsh_imm_regs,
  11927	},
  11928	{
  11929		"ALU64_RSH_K: registers",
  11930		{ },
  11931		INTERNAL,
  11932		{ },
  11933		{ { 0, 1 } },
  11934		.fill_helper = bpf_fill_alu64_rsh_imm_regs,
  11935	},
  11936	{
  11937		"ALU64_ARSH_K: registers",
  11938		{ },
  11939		INTERNAL,
  11940		{ },
  11941		{ { 0, 1 } },
  11942		.fill_helper = bpf_fill_alu64_arsh_imm_regs,
  11943	},
  11944	{
  11945		"ALU64_ADD_K: registers",
  11946		{ },
  11947		INTERNAL,
  11948		{ },
  11949		{ { 0, 1 } },
  11950		.fill_helper = bpf_fill_alu64_add_imm_regs,
  11951	},
  11952	{
  11953		"ALU64_SUB_K: registers",
  11954		{ },
  11955		INTERNAL,
  11956		{ },
  11957		{ { 0, 1 } },
  11958		.fill_helper = bpf_fill_alu64_sub_imm_regs,
  11959	},
  11960	{
  11961		"ALU64_MUL_K: registers",
  11962		{ },
  11963		INTERNAL,
  11964		{ },
  11965		{ { 0, 1 } },
  11966		.fill_helper = bpf_fill_alu64_mul_imm_regs,
  11967	},
  11968	{
  11969		"ALU64_DIV_K: registers",
  11970		{ },
  11971		INTERNAL,
  11972		{ },
  11973		{ { 0, 1 } },
  11974		.fill_helper = bpf_fill_alu64_div_imm_regs,
  11975	},
  11976	{
  11977		"ALU64_MOD_K: registers",
  11978		{ },
  11979		INTERNAL,
  11980		{ },
  11981		{ { 0, 1 } },
  11982		.fill_helper = bpf_fill_alu64_mod_imm_regs,
  11983	},
  11984	/* ALU32 K registers */
  11985	{
  11986		"ALU32_MOV_K: registers",
  11987		{ },
  11988		INTERNAL,
  11989		{ },
  11990		{ { 0, 1 } },
  11991		.fill_helper = bpf_fill_alu32_mov_imm_regs,
  11992	},
  11993	{
  11994		"ALU32_AND_K: registers",
  11995		{ },
  11996		INTERNAL,
  11997		{ },
  11998		{ { 0, 1 } },
  11999		.fill_helper = bpf_fill_alu32_and_imm_regs,
  12000	},
  12001	{
  12002		"ALU32_OR_K: registers",
  12003		{ },
  12004		INTERNAL,
  12005		{ },
  12006		{ { 0, 1 } },
  12007		.fill_helper = bpf_fill_alu32_or_imm_regs,
  12008	},
  12009	{
  12010		"ALU32_XOR_K: registers",
  12011		{ },
  12012		INTERNAL,
  12013		{ },
  12014		{ { 0, 1 } },
  12015		.fill_helper = bpf_fill_alu32_xor_imm_regs,
  12016	},
  12017	{
  12018		"ALU32_LSH_K: registers",
  12019		{ },
  12020		INTERNAL,
  12021		{ },
  12022		{ { 0, 1 } },
  12023		.fill_helper = bpf_fill_alu32_lsh_imm_regs,
  12024	},
  12025	{
  12026		"ALU32_RSH_K: registers",
  12027		{ },
  12028		INTERNAL,
  12029		{ },
  12030		{ { 0, 1 } },
  12031		.fill_helper = bpf_fill_alu32_rsh_imm_regs,
  12032	},
  12033	{
  12034		"ALU32_ARSH_K: registers",
  12035		{ },
  12036		INTERNAL,
  12037		{ },
  12038		{ { 0, 1 } },
  12039		.fill_helper = bpf_fill_alu32_arsh_imm_regs,
  12040	},
  12041	{
  12042		"ALU32_ADD_K: registers",
  12043		{ },
  12044		INTERNAL,
  12045		{ },
  12046		{ { 0, 1 } },
  12047		.fill_helper = bpf_fill_alu32_add_imm_regs,
  12048	},
  12049	{
  12050		"ALU32_SUB_K: registers",
  12051		{ },
  12052		INTERNAL,
  12053		{ },
  12054		{ { 0, 1 } },
  12055		.fill_helper = bpf_fill_alu32_sub_imm_regs,
  12056	},
  12057	{
  12058		"ALU32_MUL_K: registers",
  12059		{ },
  12060		INTERNAL,
  12061		{ },
  12062		{ { 0, 1 } },
  12063		.fill_helper = bpf_fill_alu32_mul_imm_regs,
  12064	},
  12065	{
  12066		"ALU32_DIV_K: registers",
  12067		{ },
  12068		INTERNAL,
  12069		{ },
  12070		{ { 0, 1 } },
  12071		.fill_helper = bpf_fill_alu32_div_imm_regs,
  12072	},
  12073	{
  12074		"ALU32_MOD_K: registers",
  12075		{ },
  12076		INTERNAL,
  12077		{ },
  12078		{ { 0, 1 } },
  12079		.fill_helper = bpf_fill_alu32_mod_imm_regs,
  12080	},
  12081	/* ALU64 X register combinations */
  12082	{
  12083		"ALU64_MOV_X: register combinations",
  12084		{ },
  12085		INTERNAL,
  12086		{ },
  12087		{ { 0, 1 } },
  12088		.fill_helper = bpf_fill_alu64_mov_reg_pairs,
  12089	},
  12090	{
  12091		"ALU64_AND_X: register combinations",
  12092		{ },
  12093		INTERNAL,
  12094		{ },
  12095		{ { 0, 1 } },
  12096		.fill_helper = bpf_fill_alu64_and_reg_pairs,
  12097	},
  12098	{
  12099		"ALU64_OR_X: register combinations",
  12100		{ },
  12101		INTERNAL,
  12102		{ },
  12103		{ { 0, 1 } },
  12104		.fill_helper = bpf_fill_alu64_or_reg_pairs,
  12105	},
  12106	{
  12107		"ALU64_XOR_X: register combinations",
  12108		{ },
  12109		INTERNAL,
  12110		{ },
  12111		{ { 0, 1 } },
  12112		.fill_helper = bpf_fill_alu64_xor_reg_pairs,
  12113	},
  12114	{
  12115		"ALU64_LSH_X: register combinations",
  12116		{ },
  12117		INTERNAL,
  12118		{ },
  12119		{ { 0, 1 } },
  12120		.fill_helper = bpf_fill_alu64_lsh_reg_pairs,
  12121	},
  12122	{
  12123		"ALU64_RSH_X: register combinations",
  12124		{ },
  12125		INTERNAL,
  12126		{ },
  12127		{ { 0, 1 } },
  12128		.fill_helper = bpf_fill_alu64_rsh_reg_pairs,
  12129	},
  12130	{
  12131		"ALU64_ARSH_X: register combinations",
  12132		{ },
  12133		INTERNAL,
  12134		{ },
  12135		{ { 0, 1 } },
  12136		.fill_helper = bpf_fill_alu64_arsh_reg_pairs,
  12137	},
  12138	{
  12139		"ALU64_ADD_X: register combinations",
  12140		{ },
  12141		INTERNAL,
  12142		{ },
  12143		{ { 0, 1 } },
  12144		.fill_helper = bpf_fill_alu64_add_reg_pairs,
  12145	},
  12146	{
  12147		"ALU64_SUB_X: register combinations",
  12148		{ },
  12149		INTERNAL,
  12150		{ },
  12151		{ { 0, 1 } },
  12152		.fill_helper = bpf_fill_alu64_sub_reg_pairs,
  12153	},
  12154	{
  12155		"ALU64_MUL_X: register combinations",
  12156		{ },
  12157		INTERNAL,
  12158		{ },
  12159		{ { 0, 1 } },
  12160		.fill_helper = bpf_fill_alu64_mul_reg_pairs,
  12161	},
  12162	{
  12163		"ALU64_DIV_X: register combinations",
  12164		{ },
  12165		INTERNAL,
  12166		{ },
  12167		{ { 0, 1 } },
  12168		.fill_helper = bpf_fill_alu64_div_reg_pairs,
  12169	},
  12170	{
  12171		"ALU64_MOD_X: register combinations",
  12172		{ },
  12173		INTERNAL,
  12174		{ },
  12175		{ { 0, 1 } },
  12176		.fill_helper = bpf_fill_alu64_mod_reg_pairs,
  12177	},
  12178	/* ALU32 X register combinations */
  12179	{
  12180		"ALU32_MOV_X: register combinations",
  12181		{ },
  12182		INTERNAL,
  12183		{ },
  12184		{ { 0, 1 } },
  12185		.fill_helper = bpf_fill_alu32_mov_reg_pairs,
  12186	},
  12187	{
  12188		"ALU32_AND_X: register combinations",
  12189		{ },
  12190		INTERNAL,
  12191		{ },
  12192		{ { 0, 1 } },
  12193		.fill_helper = bpf_fill_alu32_and_reg_pairs,
  12194	},
  12195	{
  12196		"ALU32_OR_X: register combinations",
  12197		{ },
  12198		INTERNAL,
  12199		{ },
  12200		{ { 0, 1 } },
  12201		.fill_helper = bpf_fill_alu32_or_reg_pairs,
  12202	},
  12203	{
  12204		"ALU32_XOR_X: register combinations",
  12205		{ },
  12206		INTERNAL,
  12207		{ },
  12208		{ { 0, 1 } },
  12209		.fill_helper = bpf_fill_alu32_xor_reg_pairs,
  12210	},
  12211	{
  12212		"ALU32_LSH_X: register combinations",
  12213		{ },
  12214		INTERNAL,
  12215		{ },
  12216		{ { 0, 1 } },
  12217		.fill_helper = bpf_fill_alu32_lsh_reg_pairs,
  12218	},
  12219	{
  12220		"ALU32_RSH_X: register combinations",
  12221		{ },
  12222		INTERNAL,
  12223		{ },
  12224		{ { 0, 1 } },
  12225		.fill_helper = bpf_fill_alu32_rsh_reg_pairs,
  12226	},
  12227	{
  12228		"ALU32_ARSH_X: register combinations",
  12229		{ },
  12230		INTERNAL,
  12231		{ },
  12232		{ { 0, 1 } },
  12233		.fill_helper = bpf_fill_alu32_arsh_reg_pairs,
  12234	},
  12235	{
  12236		"ALU32_ADD_X: register combinations",
  12237		{ },
  12238		INTERNAL,
  12239		{ },
  12240		{ { 0, 1 } },
  12241		.fill_helper = bpf_fill_alu32_add_reg_pairs,
  12242	},
  12243	{
  12244		"ALU32_SUB_X: register combinations",
  12245		{ },
  12246		INTERNAL,
  12247		{ },
  12248		{ { 0, 1 } },
  12249		.fill_helper = bpf_fill_alu32_sub_reg_pairs,
  12250	},
  12251	{
  12252		"ALU32_MUL_X: register combinations",
  12253		{ },
  12254		INTERNAL,
  12255		{ },
  12256		{ { 0, 1 } },
  12257		.fill_helper = bpf_fill_alu32_mul_reg_pairs,
  12258	},
  12259	{
  12260		"ALU32_DIV_X: register combinations",
  12261		{ },
  12262		INTERNAL,
  12263		{ },
  12264		{ { 0, 1 } },
  12265		.fill_helper = bpf_fill_alu32_div_reg_pairs,
  12266	},
  12267	{
  12268		"ALU32_MOD_X: register combinations",
  12269		{ },
  12270		INTERNAL,
  12271		{ },
  12272		{ { 0, 1 } },
  12273		.fill_helper = bpf_fill_alu32_mod_reg_pairs,
  12274	},
  12275	/* Exhaustive test of ALU64 shift operations */
  12276	{
  12277		"ALU64_LSH_K: all shift values",
  12278		{ },
  12279		INTERNAL | FLAG_NO_DATA,
  12280		{ },
  12281		{ { 0, 1 } },
  12282		.fill_helper = bpf_fill_alu64_lsh_imm,
  12283	},
  12284	{
  12285		"ALU64_RSH_K: all shift values",
  12286		{ },
  12287		INTERNAL | FLAG_NO_DATA,
  12288		{ },
  12289		{ { 0, 1 } },
  12290		.fill_helper = bpf_fill_alu64_rsh_imm,
  12291	},
  12292	{
  12293		"ALU64_ARSH_K: all shift values",
  12294		{ },
  12295		INTERNAL | FLAG_NO_DATA,
  12296		{ },
  12297		{ { 0, 1 } },
  12298		.fill_helper = bpf_fill_alu64_arsh_imm,
  12299	},
  12300	{
  12301		"ALU64_LSH_X: all shift values",
  12302		{ },
  12303		INTERNAL | FLAG_NO_DATA,
  12304		{ },
  12305		{ { 0, 1 } },
  12306		.fill_helper = bpf_fill_alu64_lsh_reg,
  12307	},
  12308	{
  12309		"ALU64_RSH_X: all shift values",
  12310		{ },
  12311		INTERNAL | FLAG_NO_DATA,
  12312		{ },
  12313		{ { 0, 1 } },
  12314		.fill_helper = bpf_fill_alu64_rsh_reg,
  12315	},
  12316	{
  12317		"ALU64_ARSH_X: all shift values",
  12318		{ },
  12319		INTERNAL | FLAG_NO_DATA,
  12320		{ },
  12321		{ { 0, 1 } },
  12322		.fill_helper = bpf_fill_alu64_arsh_reg,
  12323	},
  12324	/* Exhaustive test of ALU32 shift operations */
  12325	{
  12326		"ALU32_LSH_K: all shift values",
  12327		{ },
  12328		INTERNAL | FLAG_NO_DATA,
  12329		{ },
  12330		{ { 0, 1 } },
  12331		.fill_helper = bpf_fill_alu32_lsh_imm,
  12332	},
  12333	{
  12334		"ALU32_RSH_K: all shift values",
  12335		{ },
  12336		INTERNAL | FLAG_NO_DATA,
  12337		{ },
  12338		{ { 0, 1 } },
  12339		.fill_helper = bpf_fill_alu32_rsh_imm,
  12340	},
  12341	{
  12342		"ALU32_ARSH_K: all shift values",
  12343		{ },
  12344		INTERNAL | FLAG_NO_DATA,
  12345		{ },
  12346		{ { 0, 1 } },
  12347		.fill_helper = bpf_fill_alu32_arsh_imm,
  12348	},
  12349	{
  12350		"ALU32_LSH_X: all shift values",
  12351		{ },
  12352		INTERNAL | FLAG_NO_DATA,
  12353		{ },
  12354		{ { 0, 1 } },
  12355		.fill_helper = bpf_fill_alu32_lsh_reg,
  12356	},
  12357	{
  12358		"ALU32_RSH_X: all shift values",
  12359		{ },
  12360		INTERNAL | FLAG_NO_DATA,
  12361		{ },
  12362		{ { 0, 1 } },
  12363		.fill_helper = bpf_fill_alu32_rsh_reg,
  12364	},
  12365	{
  12366		"ALU32_ARSH_X: all shift values",
  12367		{ },
  12368		INTERNAL | FLAG_NO_DATA,
  12369		{ },
  12370		{ { 0, 1 } },
  12371		.fill_helper = bpf_fill_alu32_arsh_reg,
  12372	},
  12373	/*
  12374	 * Exhaustive test of ALU64 shift operations when
  12375	 * source and destination register are the same.
  12376	 */
  12377	{
  12378		"ALU64_LSH_X: all shift values with the same register",
  12379		{ },
  12380		INTERNAL | FLAG_NO_DATA,
  12381		{ },
  12382		{ { 0, 1 } },
  12383		.fill_helper = bpf_fill_alu64_lsh_same_reg,
  12384	},
  12385	{
  12386		"ALU64_RSH_X: all shift values with the same register",
  12387		{ },
  12388		INTERNAL | FLAG_NO_DATA,
  12389		{ },
  12390		{ { 0, 1 } },
  12391		.fill_helper = bpf_fill_alu64_rsh_same_reg,
  12392	},
  12393	{
  12394		"ALU64_ARSH_X: all shift values with the same register",
  12395		{ },
  12396		INTERNAL | FLAG_NO_DATA,
  12397		{ },
  12398		{ { 0, 1 } },
  12399		.fill_helper = bpf_fill_alu64_arsh_same_reg,
  12400	},
  12401	/*
  12402	 * Exhaustive test of ALU32 shift operations when
  12403	 * source and destination register are the same.
  12404	 */
  12405	{
  12406		"ALU32_LSH_X: all shift values with the same register",
  12407		{ },
  12408		INTERNAL | FLAG_NO_DATA,
  12409		{ },
  12410		{ { 0, 1 } },
  12411		.fill_helper = bpf_fill_alu32_lsh_same_reg,
  12412	},
  12413	{
  12414		"ALU32_RSH_X: all shift values with the same register",
  12415		{ },
  12416		INTERNAL | FLAG_NO_DATA,
  12417		{ },
  12418		{ { 0, 1 } },
  12419		.fill_helper = bpf_fill_alu32_rsh_same_reg,
  12420	},
  12421	{
  12422		"ALU32_ARSH_X: all shift values with the same register",
  12423		{ },
  12424		INTERNAL | FLAG_NO_DATA,
  12425		{ },
  12426		{ { 0, 1 } },
  12427		.fill_helper = bpf_fill_alu32_arsh_same_reg,
  12428	},
  12429	/* ALU64 immediate magnitudes */
  12430	{
  12431		"ALU64_MOV_K: all immediate value magnitudes",
  12432		{ },
  12433		INTERNAL | FLAG_NO_DATA,
  12434		{ },
  12435		{ { 0, 1 } },
  12436		.fill_helper = bpf_fill_alu64_mov_imm,
  12437		.nr_testruns = NR_PATTERN_RUNS,
  12438	},
  12439	{
  12440		"ALU64_AND_K: all immediate value magnitudes",
  12441		{ },
  12442		INTERNAL | FLAG_NO_DATA,
  12443		{ },
  12444		{ { 0, 1 } },
  12445		.fill_helper = bpf_fill_alu64_and_imm,
  12446		.nr_testruns = NR_PATTERN_RUNS,
  12447	},
  12448	{
  12449		"ALU64_OR_K: all immediate value magnitudes",
  12450		{ },
  12451		INTERNAL | FLAG_NO_DATA,
  12452		{ },
  12453		{ { 0, 1 } },
  12454		.fill_helper = bpf_fill_alu64_or_imm,
  12455		.nr_testruns = NR_PATTERN_RUNS,
  12456	},
  12457	{
  12458		"ALU64_XOR_K: all immediate value magnitudes",
  12459		{ },
  12460		INTERNAL | FLAG_NO_DATA,
  12461		{ },
  12462		{ { 0, 1 } },
  12463		.fill_helper = bpf_fill_alu64_xor_imm,
  12464		.nr_testruns = NR_PATTERN_RUNS,
  12465	},
  12466	{
  12467		"ALU64_ADD_K: all immediate value magnitudes",
  12468		{ },
  12469		INTERNAL | FLAG_NO_DATA,
  12470		{ },
  12471		{ { 0, 1 } },
  12472		.fill_helper = bpf_fill_alu64_add_imm,
  12473		.nr_testruns = NR_PATTERN_RUNS,
  12474	},
  12475	{
  12476		"ALU64_SUB_K: all immediate value magnitudes",
  12477		{ },
  12478		INTERNAL | FLAG_NO_DATA,
  12479		{ },
  12480		{ { 0, 1 } },
  12481		.fill_helper = bpf_fill_alu64_sub_imm,
  12482		.nr_testruns = NR_PATTERN_RUNS,
  12483	},
  12484	{
  12485		"ALU64_MUL_K: all immediate value magnitudes",
  12486		{ },
  12487		INTERNAL | FLAG_NO_DATA,
  12488		{ },
  12489		{ { 0, 1 } },
  12490		.fill_helper = bpf_fill_alu64_mul_imm,
  12491		.nr_testruns = NR_PATTERN_RUNS,
  12492	},
  12493	{
  12494		"ALU64_DIV_K: all immediate value magnitudes",
  12495		{ },
  12496		INTERNAL | FLAG_NO_DATA,
  12497		{ },
  12498		{ { 0, 1 } },
  12499		.fill_helper = bpf_fill_alu64_div_imm,
  12500		.nr_testruns = NR_PATTERN_RUNS,
  12501	},
  12502	{
  12503		"ALU64_MOD_K: all immediate value magnitudes",
  12504		{ },
  12505		INTERNAL | FLAG_NO_DATA,
  12506		{ },
  12507		{ { 0, 1 } },
  12508		.fill_helper = bpf_fill_alu64_mod_imm,
  12509		.nr_testruns = NR_PATTERN_RUNS,
  12510	},
  12511	/* ALU32 immediate magnitudes */
  12512	{
  12513		"ALU32_MOV_K: all immediate value magnitudes",
  12514		{ },
  12515		INTERNAL | FLAG_NO_DATA,
  12516		{ },
  12517		{ { 0, 1 } },
  12518		.fill_helper = bpf_fill_alu32_mov_imm,
  12519		.nr_testruns = NR_PATTERN_RUNS,
  12520	},
  12521	{
  12522		"ALU32_AND_K: all immediate value magnitudes",
  12523		{ },
  12524		INTERNAL | FLAG_NO_DATA,
  12525		{ },
  12526		{ { 0, 1 } },
  12527		.fill_helper = bpf_fill_alu32_and_imm,
  12528		.nr_testruns = NR_PATTERN_RUNS,
  12529	},
  12530	{
  12531		"ALU32_OR_K: all immediate value magnitudes",
  12532		{ },
  12533		INTERNAL | FLAG_NO_DATA,
  12534		{ },
  12535		{ { 0, 1 } },
  12536		.fill_helper = bpf_fill_alu32_or_imm,
  12537		.nr_testruns = NR_PATTERN_RUNS,
  12538	},
  12539	{
  12540		"ALU32_XOR_K: all immediate value magnitudes",
  12541		{ },
  12542		INTERNAL | FLAG_NO_DATA,
  12543		{ },
  12544		{ { 0, 1 } },
  12545		.fill_helper = bpf_fill_alu32_xor_imm,
  12546		.nr_testruns = NR_PATTERN_RUNS,
  12547	},
  12548	{
  12549		"ALU32_ADD_K: all immediate value magnitudes",
  12550		{ },
  12551		INTERNAL | FLAG_NO_DATA,
  12552		{ },
  12553		{ { 0, 1 } },
  12554		.fill_helper = bpf_fill_alu32_add_imm,
  12555		.nr_testruns = NR_PATTERN_RUNS,
  12556	},
  12557	{
  12558		"ALU32_SUB_K: all immediate value magnitudes",
  12559		{ },
  12560		INTERNAL | FLAG_NO_DATA,
  12561		{ },
  12562		{ { 0, 1 } },
  12563		.fill_helper = bpf_fill_alu32_sub_imm,
  12564		.nr_testruns = NR_PATTERN_RUNS,
  12565	},
  12566	{
  12567		"ALU32_MUL_K: all immediate value magnitudes",
  12568		{ },
  12569		INTERNAL | FLAG_NO_DATA,
  12570		{ },
  12571		{ { 0, 1 } },
  12572		.fill_helper = bpf_fill_alu32_mul_imm,
  12573		.nr_testruns = NR_PATTERN_RUNS,
  12574	},
  12575	{
  12576		"ALU32_DIV_K: all immediate value magnitudes",
  12577		{ },
  12578		INTERNAL | FLAG_NO_DATA,
  12579		{ },
  12580		{ { 0, 1 } },
  12581		.fill_helper = bpf_fill_alu32_div_imm,
  12582		.nr_testruns = NR_PATTERN_RUNS,
  12583	},
  12584	{
  12585		"ALU32_MOD_K: all immediate value magnitudes",
  12586		{ },
  12587		INTERNAL | FLAG_NO_DATA,
  12588		{ },
  12589		{ { 0, 1 } },
  12590		.fill_helper = bpf_fill_alu32_mod_imm,
  12591		.nr_testruns = NR_PATTERN_RUNS,
  12592	},
  12593	/* ALU64 register magnitudes */
  12594	{
  12595		"ALU64_MOV_X: all register value magnitudes",
  12596		{ },
  12597		INTERNAL | FLAG_NO_DATA,
  12598		{ },
  12599		{ { 0, 1 } },
  12600		.fill_helper = bpf_fill_alu64_mov_reg,
  12601		.nr_testruns = NR_PATTERN_RUNS,
  12602	},
  12603	{
  12604		"ALU64_AND_X: all register value magnitudes",
  12605		{ },
  12606		INTERNAL | FLAG_NO_DATA,
  12607		{ },
  12608		{ { 0, 1 } },
  12609		.fill_helper = bpf_fill_alu64_and_reg,
  12610		.nr_testruns = NR_PATTERN_RUNS,
  12611	},
  12612	{
  12613		"ALU64_OR_X: all register value magnitudes",
  12614		{ },
  12615		INTERNAL | FLAG_NO_DATA,
  12616		{ },
  12617		{ { 0, 1 } },
  12618		.fill_helper = bpf_fill_alu64_or_reg,
  12619		.nr_testruns = NR_PATTERN_RUNS,
  12620	},
  12621	{
  12622		"ALU64_XOR_X: all register value magnitudes",
  12623		{ },
  12624		INTERNAL | FLAG_NO_DATA,
  12625		{ },
  12626		{ { 0, 1 } },
  12627		.fill_helper = bpf_fill_alu64_xor_reg,
  12628		.nr_testruns = NR_PATTERN_RUNS,
  12629	},
  12630	{
  12631		"ALU64_ADD_X: all register value magnitudes",
  12632		{ },
  12633		INTERNAL | FLAG_NO_DATA,
  12634		{ },
  12635		{ { 0, 1 } },
  12636		.fill_helper = bpf_fill_alu64_add_reg,
  12637		.nr_testruns = NR_PATTERN_RUNS,
  12638	},
  12639	{
  12640		"ALU64_SUB_X: all register value magnitudes",
  12641		{ },
  12642		INTERNAL | FLAG_NO_DATA,
  12643		{ },
  12644		{ { 0, 1 } },
  12645		.fill_helper = bpf_fill_alu64_sub_reg,
  12646		.nr_testruns = NR_PATTERN_RUNS,
  12647	},
  12648	{
  12649		"ALU64_MUL_X: all register value magnitudes",
  12650		{ },
  12651		INTERNAL | FLAG_NO_DATA,
  12652		{ },
  12653		{ { 0, 1 } },
  12654		.fill_helper = bpf_fill_alu64_mul_reg,
  12655		.nr_testruns = NR_PATTERN_RUNS,
  12656	},
  12657	{
  12658		"ALU64_DIV_X: all register value magnitudes",
  12659		{ },
  12660		INTERNAL | FLAG_NO_DATA,
  12661		{ },
  12662		{ { 0, 1 } },
  12663		.fill_helper = bpf_fill_alu64_div_reg,
  12664		.nr_testruns = NR_PATTERN_RUNS,
  12665	},
  12666	{
  12667		"ALU64_MOD_X: all register value magnitudes",
  12668		{ },
  12669		INTERNAL | FLAG_NO_DATA,
  12670		{ },
  12671		{ { 0, 1 } },
  12672		.fill_helper = bpf_fill_alu64_mod_reg,
  12673		.nr_testruns = NR_PATTERN_RUNS,
  12674	},
  12675	/* ALU32 register magnitudes */
  12676	{
  12677		"ALU32_MOV_X: all register value magnitudes",
  12678		{ },
  12679		INTERNAL | FLAG_NO_DATA,
  12680		{ },
  12681		{ { 0, 1 } },
  12682		.fill_helper = bpf_fill_alu32_mov_reg,
  12683		.nr_testruns = NR_PATTERN_RUNS,
  12684	},
  12685	{
  12686		"ALU32_AND_X: all register value magnitudes",
  12687		{ },
  12688		INTERNAL | FLAG_NO_DATA,
  12689		{ },
  12690		{ { 0, 1 } },
  12691		.fill_helper = bpf_fill_alu32_and_reg,
  12692		.nr_testruns = NR_PATTERN_RUNS,
  12693	},
  12694	{
  12695		"ALU32_OR_X: all register value magnitudes",
  12696		{ },
  12697		INTERNAL | FLAG_NO_DATA,
  12698		{ },
  12699		{ { 0, 1 } },
  12700		.fill_helper = bpf_fill_alu32_or_reg,
  12701		.nr_testruns = NR_PATTERN_RUNS,
  12702	},
  12703	{
  12704		"ALU32_XOR_X: all register value magnitudes",
  12705		{ },
  12706		INTERNAL | FLAG_NO_DATA,
  12707		{ },
  12708		{ { 0, 1 } },
  12709		.fill_helper = bpf_fill_alu32_xor_reg,
  12710		.nr_testruns = NR_PATTERN_RUNS,
  12711	},
  12712	{
  12713		"ALU32_ADD_X: all register value magnitudes",
  12714		{ },
  12715		INTERNAL | FLAG_NO_DATA,
  12716		{ },
  12717		{ { 0, 1 } },
  12718		.fill_helper = bpf_fill_alu32_add_reg,
  12719		.nr_testruns = NR_PATTERN_RUNS,
  12720	},
  12721	{
  12722		"ALU32_SUB_X: all register value magnitudes",
  12723		{ },
  12724		INTERNAL | FLAG_NO_DATA,
  12725		{ },
  12726		{ { 0, 1 } },
  12727		.fill_helper = bpf_fill_alu32_sub_reg,
  12728		.nr_testruns = NR_PATTERN_RUNS,
  12729	},
  12730	{
  12731		"ALU32_MUL_X: all register value magnitudes",
  12732		{ },
  12733		INTERNAL | FLAG_NO_DATA,
  12734		{ },
  12735		{ { 0, 1 } },
  12736		.fill_helper = bpf_fill_alu32_mul_reg,
  12737		.nr_testruns = NR_PATTERN_RUNS,
  12738	},
  12739	{
  12740		"ALU32_DIV_X: all register value magnitudes",
  12741		{ },
  12742		INTERNAL | FLAG_NO_DATA,
  12743		{ },
  12744		{ { 0, 1 } },
  12745		.fill_helper = bpf_fill_alu32_div_reg,
  12746		.nr_testruns = NR_PATTERN_RUNS,
  12747	},
  12748	{
  12749		"ALU32_MOD_X: all register value magnitudes",
  12750		{ },
  12751		INTERNAL | FLAG_NO_DATA,
  12752		{ },
  12753		{ { 0, 1 } },
  12754		.fill_helper = bpf_fill_alu32_mod_reg,
  12755		.nr_testruns = NR_PATTERN_RUNS,
  12756	},
  12757	/* LD_IMM64 immediate magnitudes and byte patterns */
  12758	{
  12759		"LD_IMM64: all immediate value magnitudes",
  12760		{ },
  12761		INTERNAL | FLAG_NO_DATA,
  12762		{ },
  12763		{ { 0, 1 } },
  12764		.fill_helper = bpf_fill_ld_imm64_magn,
  12765	},
  12766	{
  12767		"LD_IMM64: checker byte patterns",
  12768		{ },
  12769		INTERNAL | FLAG_NO_DATA,
  12770		{ },
  12771		{ { 0, 1 } },
  12772		.fill_helper = bpf_fill_ld_imm64_checker,
  12773	},
  12774	{
  12775		"LD_IMM64: random positive and zero byte patterns",
  12776		{ },
  12777		INTERNAL | FLAG_NO_DATA,
  12778		{ },
  12779		{ { 0, 1 } },
  12780		.fill_helper = bpf_fill_ld_imm64_pos_zero,
  12781	},
  12782	{
  12783		"LD_IMM64: random negative and zero byte patterns",
  12784		{ },
  12785		INTERNAL | FLAG_NO_DATA,
  12786		{ },
  12787		{ { 0, 1 } },
  12788		.fill_helper = bpf_fill_ld_imm64_neg_zero,
  12789	},
  12790	{
  12791		"LD_IMM64: random positive and negative byte patterns",
  12792		{ },
  12793		INTERNAL | FLAG_NO_DATA,
  12794		{ },
  12795		{ { 0, 1 } },
  12796		.fill_helper = bpf_fill_ld_imm64_pos_neg,
  12797	},
  12798	/* 64-bit ATOMIC register combinations */
  12799	{
  12800		"ATOMIC_DW_ADD: register combinations",
  12801		{ },
  12802		INTERNAL,
  12803		{ },
  12804		{ { 0, 1 } },
  12805		.fill_helper = bpf_fill_atomic64_add_reg_pairs,
  12806		.stack_depth = 8,
  12807	},
  12808	{
  12809		"ATOMIC_DW_AND: register combinations",
  12810		{ },
  12811		INTERNAL,
  12812		{ },
  12813		{ { 0, 1 } },
  12814		.fill_helper = bpf_fill_atomic64_and_reg_pairs,
  12815		.stack_depth = 8,
  12816	},
  12817	{
  12818		"ATOMIC_DW_OR: register combinations",
  12819		{ },
  12820		INTERNAL,
  12821		{ },
  12822		{ { 0, 1 } },
  12823		.fill_helper = bpf_fill_atomic64_or_reg_pairs,
  12824		.stack_depth = 8,
  12825	},
  12826	{
  12827		"ATOMIC_DW_XOR: register combinations",
  12828		{ },
  12829		INTERNAL,
  12830		{ },
  12831		{ { 0, 1 } },
  12832		.fill_helper = bpf_fill_atomic64_xor_reg_pairs,
  12833		.stack_depth = 8,
  12834	},
  12835	{
  12836		"ATOMIC_DW_ADD_FETCH: register combinations",
  12837		{ },
  12838		INTERNAL,
  12839		{ },
  12840		{ { 0, 1 } },
  12841		.fill_helper = bpf_fill_atomic64_add_fetch_reg_pairs,
  12842		.stack_depth = 8,
  12843	},
  12844	{
  12845		"ATOMIC_DW_AND_FETCH: register combinations",
  12846		{ },
  12847		INTERNAL,
  12848		{ },
  12849		{ { 0, 1 } },
  12850		.fill_helper = bpf_fill_atomic64_and_fetch_reg_pairs,
  12851		.stack_depth = 8,
  12852	},
  12853	{
  12854		"ATOMIC_DW_OR_FETCH: register combinations",
  12855		{ },
  12856		INTERNAL,
  12857		{ },
  12858		{ { 0, 1 } },
  12859		.fill_helper = bpf_fill_atomic64_or_fetch_reg_pairs,
  12860		.stack_depth = 8,
  12861	},
  12862	{
  12863		"ATOMIC_DW_XOR_FETCH: register combinations",
  12864		{ },
  12865		INTERNAL,
  12866		{ },
  12867		{ { 0, 1 } },
  12868		.fill_helper = bpf_fill_atomic64_xor_fetch_reg_pairs,
  12869		.stack_depth = 8,
  12870	},
  12871	{
  12872		"ATOMIC_DW_XCHG: register combinations",
  12873		{ },
  12874		INTERNAL,
  12875		{ },
  12876		{ { 0, 1 } },
  12877		.fill_helper = bpf_fill_atomic64_xchg_reg_pairs,
  12878		.stack_depth = 8,
  12879	},
  12880	{
  12881		"ATOMIC_DW_CMPXCHG: register combinations",
  12882		{ },
  12883		INTERNAL,
  12884		{ },
  12885		{ { 0, 1 } },
  12886		.fill_helper = bpf_fill_atomic64_cmpxchg_reg_pairs,
  12887		.stack_depth = 8,
  12888	},
  12889	/* 32-bit ATOMIC register combinations */
  12890	{
  12891		"ATOMIC_W_ADD: register combinations",
  12892		{ },
  12893		INTERNAL,
  12894		{ },
  12895		{ { 0, 1 } },
  12896		.fill_helper = bpf_fill_atomic32_add_reg_pairs,
  12897		.stack_depth = 8,
  12898	},
  12899	{
  12900		"ATOMIC_W_AND: register combinations",
  12901		{ },
  12902		INTERNAL,
  12903		{ },
  12904		{ { 0, 1 } },
  12905		.fill_helper = bpf_fill_atomic32_and_reg_pairs,
  12906		.stack_depth = 8,
  12907	},
  12908	{
  12909		"ATOMIC_W_OR: register combinations",
  12910		{ },
  12911		INTERNAL,
  12912		{ },
  12913		{ { 0, 1 } },
  12914		.fill_helper = bpf_fill_atomic32_or_reg_pairs,
  12915		.stack_depth = 8,
  12916	},
  12917	{
  12918		"ATOMIC_W_XOR: register combinations",
  12919		{ },
  12920		INTERNAL,
  12921		{ },
  12922		{ { 0, 1 } },
  12923		.fill_helper = bpf_fill_atomic32_xor_reg_pairs,
  12924		.stack_depth = 8,
  12925	},
  12926	{
  12927		"ATOMIC_W_ADD_FETCH: register combinations",
  12928		{ },
  12929		INTERNAL,
  12930		{ },
  12931		{ { 0, 1 } },
  12932		.fill_helper = bpf_fill_atomic32_add_fetch_reg_pairs,
  12933		.stack_depth = 8,
  12934	},
  12935	{
  12936		"ATOMIC_W_AND_FETCH: register combinations",
  12937		{ },
  12938		INTERNAL,
  12939		{ },
  12940		{ { 0, 1 } },
  12941		.fill_helper = bpf_fill_atomic32_and_fetch_reg_pairs,
  12942		.stack_depth = 8,
  12943	},
  12944	{
  12945		"ATOMIC_W_OR_FETCH: register combinations",
  12946		{ },
  12947		INTERNAL,
  12948		{ },
  12949		{ { 0, 1 } },
  12950		.fill_helper = bpf_fill_atomic32_or_fetch_reg_pairs,
  12951		.stack_depth = 8,
  12952	},
  12953	{
  12954		"ATOMIC_W_XOR_FETCH: register combinations",
  12955		{ },
  12956		INTERNAL,
  12957		{ },
  12958		{ { 0, 1 } },
  12959		.fill_helper = bpf_fill_atomic32_xor_fetch_reg_pairs,
  12960		.stack_depth = 8,
  12961	},
  12962	{
  12963		"ATOMIC_W_XCHG: register combinations",
  12964		{ },
  12965		INTERNAL,
  12966		{ },
  12967		{ { 0, 1 } },
  12968		.fill_helper = bpf_fill_atomic32_xchg_reg_pairs,
  12969		.stack_depth = 8,
  12970	},
  12971	{
  12972		"ATOMIC_W_CMPXCHG: register combinations",
  12973		{ },
  12974		INTERNAL,
  12975		{ },
  12976		{ { 0, 1 } },
  12977		.fill_helper = bpf_fill_atomic32_cmpxchg_reg_pairs,
  12978		.stack_depth = 8,
  12979	},
  12980	/* 64-bit ATOMIC magnitudes */
  12981	{
  12982		"ATOMIC_DW_ADD: all operand magnitudes",
  12983		{ },
  12984		INTERNAL | FLAG_NO_DATA,
  12985		{ },
  12986		{ { 0, 1 } },
  12987		.fill_helper = bpf_fill_atomic64_add,
  12988		.stack_depth = 8,
  12989		.nr_testruns = NR_PATTERN_RUNS,
  12990	},
  12991	{
  12992		"ATOMIC_DW_AND: all operand magnitudes",
  12993		{ },
  12994		INTERNAL | FLAG_NO_DATA,
  12995		{ },
  12996		{ { 0, 1 } },
  12997		.fill_helper = bpf_fill_atomic64_and,
  12998		.stack_depth = 8,
  12999		.nr_testruns = NR_PATTERN_RUNS,
  13000	},
  13001	{
  13002		"ATOMIC_DW_OR: all operand magnitudes",
  13003		{ },
  13004		INTERNAL | FLAG_NO_DATA,
  13005		{ },
  13006		{ { 0, 1 } },
  13007		.fill_helper = bpf_fill_atomic64_or,
  13008		.stack_depth = 8,
  13009		.nr_testruns = NR_PATTERN_RUNS,
  13010	},
  13011	{
  13012		"ATOMIC_DW_XOR: all operand magnitudes",
  13013		{ },
  13014		INTERNAL | FLAG_NO_DATA,
  13015		{ },
  13016		{ { 0, 1 } },
  13017		.fill_helper = bpf_fill_atomic64_xor,
  13018		.stack_depth = 8,
  13019		.nr_testruns = NR_PATTERN_RUNS,
  13020	},
  13021	{
  13022		"ATOMIC_DW_ADD_FETCH: all operand magnitudes",
  13023		{ },
  13024		INTERNAL | FLAG_NO_DATA,
  13025		{ },
  13026		{ { 0, 1 } },
  13027		.fill_helper = bpf_fill_atomic64_add_fetch,
  13028		.stack_depth = 8,
  13029		.nr_testruns = NR_PATTERN_RUNS,
  13030	},
  13031	{
  13032		"ATOMIC_DW_AND_FETCH: all operand magnitudes",
  13033		{ },
  13034		INTERNAL | FLAG_NO_DATA,
  13035		{ },
  13036		{ { 0, 1 } },
  13037		.fill_helper = bpf_fill_atomic64_and_fetch,
  13038		.stack_depth = 8,
  13039		.nr_testruns = NR_PATTERN_RUNS,
  13040	},
  13041	{
  13042		"ATOMIC_DW_OR_FETCH: all operand magnitudes",
  13043		{ },
  13044		INTERNAL | FLAG_NO_DATA,
  13045		{ },
  13046		{ { 0, 1 } },
  13047		.fill_helper = bpf_fill_atomic64_or_fetch,
  13048		.stack_depth = 8,
  13049		.nr_testruns = NR_PATTERN_RUNS,
  13050	},
  13051	{
  13052		"ATOMIC_DW_XOR_FETCH: all operand magnitudes",
  13053		{ },
  13054		INTERNAL | FLAG_NO_DATA,
  13055		{ },
  13056		{ { 0, 1 } },
  13057		.fill_helper = bpf_fill_atomic64_xor_fetch,
  13058		.stack_depth = 8,
  13059		.nr_testruns = NR_PATTERN_RUNS,
  13060	},
  13061	{
  13062		"ATOMIC_DW_XCHG: all operand magnitudes",
  13063		{ },
  13064		INTERNAL | FLAG_NO_DATA,
  13065		{ },
  13066		{ { 0, 1 } },
  13067		.fill_helper = bpf_fill_atomic64_xchg,
  13068		.stack_depth = 8,
  13069		.nr_testruns = NR_PATTERN_RUNS,
  13070	},
  13071	{
  13072		"ATOMIC_DW_CMPXCHG: all operand magnitudes",
  13073		{ },
  13074		INTERNAL | FLAG_NO_DATA,
  13075		{ },
  13076		{ { 0, 1 } },
  13077		.fill_helper = bpf_fill_cmpxchg64,
  13078		.stack_depth = 8,
  13079		.nr_testruns = NR_PATTERN_RUNS,
  13080	},
  13081	/* 32-bit ATOMIC magnitudes */
  13082	{
  13083		"ATOMIC_W_ADD: all operand magnitudes",
  13084		{ },
  13085		INTERNAL | FLAG_NO_DATA,
  13086		{ },
  13087		{ { 0, 1 } },
  13088		.fill_helper = bpf_fill_atomic32_add,
  13089		.stack_depth = 8,
  13090		.nr_testruns = NR_PATTERN_RUNS,
  13091	},
  13092	{
  13093		"ATOMIC_W_AND: all operand magnitudes",
  13094		{ },
  13095		INTERNAL | FLAG_NO_DATA,
  13096		{ },
  13097		{ { 0, 1 } },
  13098		.fill_helper = bpf_fill_atomic32_and,
  13099		.stack_depth = 8,
  13100		.nr_testruns = NR_PATTERN_RUNS,
  13101	},
  13102	{
  13103		"ATOMIC_W_OR: all operand magnitudes",
  13104		{ },
  13105		INTERNAL | FLAG_NO_DATA,
  13106		{ },
  13107		{ { 0, 1 } },
  13108		.fill_helper = bpf_fill_atomic32_or,
  13109		.stack_depth = 8,
  13110		.nr_testruns = NR_PATTERN_RUNS,
  13111	},
  13112	{
  13113		"ATOMIC_W_XOR: all operand magnitudes",
  13114		{ },
  13115		INTERNAL | FLAG_NO_DATA,
  13116		{ },
  13117		{ { 0, 1 } },
  13118		.fill_helper = bpf_fill_atomic32_xor,
  13119		.stack_depth = 8,
  13120		.nr_testruns = NR_PATTERN_RUNS,
  13121	},
  13122	{
  13123		"ATOMIC_W_ADD_FETCH: all operand magnitudes",
  13124		{ },
  13125		INTERNAL | FLAG_NO_DATA,
  13126		{ },
  13127		{ { 0, 1 } },
  13128		.fill_helper = bpf_fill_atomic32_add_fetch,
  13129		.stack_depth = 8,
  13130		.nr_testruns = NR_PATTERN_RUNS,
  13131	},
  13132	{
  13133		"ATOMIC_W_AND_FETCH: all operand magnitudes",
  13134		{ },
  13135		INTERNAL | FLAG_NO_DATA,
  13136		{ },
  13137		{ { 0, 1 } },
  13138		.fill_helper = bpf_fill_atomic32_and_fetch,
  13139		.stack_depth = 8,
  13140		.nr_testruns = NR_PATTERN_RUNS,
  13141	},
  13142	{
  13143		"ATOMIC_W_OR_FETCH: all operand magnitudes",
  13144		{ },
  13145		INTERNAL | FLAG_NO_DATA,
  13146		{ },
  13147		{ { 0, 1 } },
  13148		.fill_helper = bpf_fill_atomic32_or_fetch,
  13149		.stack_depth = 8,
  13150		.nr_testruns = NR_PATTERN_RUNS,
  13151	},
  13152	{
  13153		"ATOMIC_W_XOR_FETCH: all operand magnitudes",
  13154		{ },
  13155		INTERNAL | FLAG_NO_DATA,
  13156		{ },
  13157		{ { 0, 1 } },
  13158		.fill_helper = bpf_fill_atomic32_xor_fetch,
  13159		.stack_depth = 8,
  13160		.nr_testruns = NR_PATTERN_RUNS,
  13161	},
  13162	{
  13163		"ATOMIC_W_XCHG: all operand magnitudes",
  13164		{ },
  13165		INTERNAL | FLAG_NO_DATA,
  13166		{ },
  13167		{ { 0, 1 } },
  13168		.fill_helper = bpf_fill_atomic32_xchg,
  13169		.stack_depth = 8,
  13170		.nr_testruns = NR_PATTERN_RUNS,
  13171	},
  13172	{
  13173		"ATOMIC_W_CMPXCHG: all operand magnitudes",
  13174		{ },
  13175		INTERNAL | FLAG_NO_DATA,
  13176		{ },
  13177		{ { 0, 1 } },
  13178		.fill_helper = bpf_fill_cmpxchg32,
  13179		.stack_depth = 8,
  13180		.nr_testruns = NR_PATTERN_RUNS,
  13181	},
  13182	/* JMP immediate magnitudes */
  13183	{
  13184		"JMP_JSET_K: all immediate value magnitudes",
  13185		{ },
  13186		INTERNAL | FLAG_NO_DATA,
  13187		{ },
  13188		{ { 0, 1 } },
  13189		.fill_helper = bpf_fill_jmp_jset_imm,
  13190		.nr_testruns = NR_PATTERN_RUNS,
  13191	},
  13192	{
  13193		"JMP_JEQ_K: all immediate value magnitudes",
  13194		{ },
  13195		INTERNAL | FLAG_NO_DATA,
  13196		{ },
  13197		{ { 0, 1 } },
  13198		.fill_helper = bpf_fill_jmp_jeq_imm,
  13199		.nr_testruns = NR_PATTERN_RUNS,
  13200	},
  13201	{
  13202		"JMP_JNE_K: all immediate value magnitudes",
  13203		{ },
  13204		INTERNAL | FLAG_NO_DATA,
  13205		{ },
  13206		{ { 0, 1 } },
  13207		.fill_helper = bpf_fill_jmp_jne_imm,
  13208		.nr_testruns = NR_PATTERN_RUNS,
  13209	},
  13210	{
  13211		"JMP_JGT_K: all immediate value magnitudes",
  13212		{ },
  13213		INTERNAL | FLAG_NO_DATA,
  13214		{ },
  13215		{ { 0, 1 } },
  13216		.fill_helper = bpf_fill_jmp_jgt_imm,
  13217		.nr_testruns = NR_PATTERN_RUNS,
  13218	},
  13219	{
  13220		"JMP_JGE_K: all immediate value magnitudes",
  13221		{ },
  13222		INTERNAL | FLAG_NO_DATA,
  13223		{ },
  13224		{ { 0, 1 } },
  13225		.fill_helper = bpf_fill_jmp_jge_imm,
  13226		.nr_testruns = NR_PATTERN_RUNS,
  13227	},
  13228	{
  13229		"JMP_JLT_K: all immediate value magnitudes",
  13230		{ },
  13231		INTERNAL | FLAG_NO_DATA,
  13232		{ },
  13233		{ { 0, 1 } },
  13234		.fill_helper = bpf_fill_jmp_jlt_imm,
  13235		.nr_testruns = NR_PATTERN_RUNS,
  13236	},
  13237	{
  13238		"JMP_JLE_K: all immediate value magnitudes",
  13239		{ },
  13240		INTERNAL | FLAG_NO_DATA,
  13241		{ },
  13242		{ { 0, 1 } },
  13243		.fill_helper = bpf_fill_jmp_jle_imm,
  13244		.nr_testruns = NR_PATTERN_RUNS,
  13245	},
  13246	{
  13247		"JMP_JSGT_K: all immediate value magnitudes",
  13248		{ },
  13249		INTERNAL | FLAG_NO_DATA,
  13250		{ },
  13251		{ { 0, 1 } },
  13252		.fill_helper = bpf_fill_jmp_jsgt_imm,
  13253		.nr_testruns = NR_PATTERN_RUNS,
  13254	},
  13255	{
  13256		"JMP_JSGE_K: all immediate value magnitudes",
  13257		{ },
  13258		INTERNAL | FLAG_NO_DATA,
  13259		{ },
  13260		{ { 0, 1 } },
  13261		.fill_helper = bpf_fill_jmp_jsge_imm,
  13262		.nr_testruns = NR_PATTERN_RUNS,
  13263	},
  13264	{
  13265		"JMP_JSLT_K: all immediate value magnitudes",
  13266		{ },
  13267		INTERNAL | FLAG_NO_DATA,
  13268		{ },
  13269		{ { 0, 1 } },
  13270		.fill_helper = bpf_fill_jmp_jslt_imm,
  13271		.nr_testruns = NR_PATTERN_RUNS,
  13272	},
  13273	{
  13274		"JMP_JSLE_K: all immediate value magnitudes",
  13275		{ },
  13276		INTERNAL | FLAG_NO_DATA,
  13277		{ },
  13278		{ { 0, 1 } },
  13279		.fill_helper = bpf_fill_jmp_jsle_imm,
  13280		.nr_testruns = NR_PATTERN_RUNS,
  13281	},
  13282	/* JMP register magnitudes */
  13283	{
  13284		"JMP_JSET_X: all register value magnitudes",
  13285		{ },
  13286		INTERNAL | FLAG_NO_DATA,
  13287		{ },
  13288		{ { 0, 1 } },
  13289		.fill_helper = bpf_fill_jmp_jset_reg,
  13290		.nr_testruns = NR_PATTERN_RUNS,
  13291	},
  13292	{
  13293		"JMP_JEQ_X: all register value magnitudes",
  13294		{ },
  13295		INTERNAL | FLAG_NO_DATA,
  13296		{ },
  13297		{ { 0, 1 } },
  13298		.fill_helper = bpf_fill_jmp_jeq_reg,
  13299		.nr_testruns = NR_PATTERN_RUNS,
  13300	},
  13301	{
  13302		"JMP_JNE_X: all register value magnitudes",
  13303		{ },
  13304		INTERNAL | FLAG_NO_DATA,
  13305		{ },
  13306		{ { 0, 1 } },
  13307		.fill_helper = bpf_fill_jmp_jne_reg,
  13308		.nr_testruns = NR_PATTERN_RUNS,
  13309	},
  13310	{
  13311		"JMP_JGT_X: all register value magnitudes",
  13312		{ },
  13313		INTERNAL | FLAG_NO_DATA,
  13314		{ },
  13315		{ { 0, 1 } },
  13316		.fill_helper = bpf_fill_jmp_jgt_reg,
  13317		.nr_testruns = NR_PATTERN_RUNS,
  13318	},
  13319	{
  13320		"JMP_JGE_X: all register value magnitudes",
  13321		{ },
  13322		INTERNAL | FLAG_NO_DATA,
  13323		{ },
  13324		{ { 0, 1 } },
  13325		.fill_helper = bpf_fill_jmp_jge_reg,
  13326		.nr_testruns = NR_PATTERN_RUNS,
  13327	},
  13328	{
  13329		"JMP_JLT_X: all register value magnitudes",
  13330		{ },
  13331		INTERNAL | FLAG_NO_DATA,
  13332		{ },
  13333		{ { 0, 1 } },
  13334		.fill_helper = bpf_fill_jmp_jlt_reg,
  13335		.nr_testruns = NR_PATTERN_RUNS,
  13336	},
  13337	{
  13338		"JMP_JLE_X: all register value magnitudes",
  13339		{ },
  13340		INTERNAL | FLAG_NO_DATA,
  13341		{ },
  13342		{ { 0, 1 } },
  13343		.fill_helper = bpf_fill_jmp_jle_reg,
  13344		.nr_testruns = NR_PATTERN_RUNS,
  13345	},
  13346	{
  13347		"JMP_JSGT_X: all register value magnitudes",
  13348		{ },
  13349		INTERNAL | FLAG_NO_DATA,
  13350		{ },
  13351		{ { 0, 1 } },
  13352		.fill_helper = bpf_fill_jmp_jsgt_reg,
  13353		.nr_testruns = NR_PATTERN_RUNS,
  13354	},
  13355	{
  13356		"JMP_JSGE_X: all register value magnitudes",
  13357		{ },
  13358		INTERNAL | FLAG_NO_DATA,
  13359		{ },
  13360		{ { 0, 1 } },
  13361		.fill_helper = bpf_fill_jmp_jsge_reg,
  13362		.nr_testruns = NR_PATTERN_RUNS,
  13363	},
  13364	{
  13365		"JMP_JSLT_X: all register value magnitudes",
  13366		{ },
  13367		INTERNAL | FLAG_NO_DATA,
  13368		{ },
  13369		{ { 0, 1 } },
  13370		.fill_helper = bpf_fill_jmp_jslt_reg,
  13371		.nr_testruns = NR_PATTERN_RUNS,
  13372	},
  13373	{
  13374		"JMP_JSLE_X: all register value magnitudes",
  13375		{ },
  13376		INTERNAL | FLAG_NO_DATA,
  13377		{ },
  13378		{ { 0, 1 } },
  13379		.fill_helper = bpf_fill_jmp_jsle_reg,
  13380		.nr_testruns = NR_PATTERN_RUNS,
  13381	},
  13382	/* JMP32 immediate magnitudes */
  13383	{
  13384		"JMP32_JSET_K: all immediate value magnitudes",
  13385		{ },
  13386		INTERNAL | FLAG_NO_DATA,
  13387		{ },
  13388		{ { 0, 1 } },
  13389		.fill_helper = bpf_fill_jmp32_jset_imm,
  13390		.nr_testruns = NR_PATTERN_RUNS,
  13391	},
  13392	{
  13393		"JMP32_JEQ_K: all immediate value magnitudes",
  13394		{ },
  13395		INTERNAL | FLAG_NO_DATA,
  13396		{ },
  13397		{ { 0, 1 } },
  13398		.fill_helper = bpf_fill_jmp32_jeq_imm,
  13399		.nr_testruns = NR_PATTERN_RUNS,
  13400	},
  13401	{
  13402		"JMP32_JNE_K: all immediate value magnitudes",
  13403		{ },
  13404		INTERNAL | FLAG_NO_DATA,
  13405		{ },
  13406		{ { 0, 1 } },
  13407		.fill_helper = bpf_fill_jmp32_jne_imm,
  13408		.nr_testruns = NR_PATTERN_RUNS,
  13409	},
  13410	{
  13411		"JMP32_JGT_K: all immediate value magnitudes",
  13412		{ },
  13413		INTERNAL | FLAG_NO_DATA,
  13414		{ },
  13415		{ { 0, 1 } },
  13416		.fill_helper = bpf_fill_jmp32_jgt_imm,
  13417		.nr_testruns = NR_PATTERN_RUNS,
  13418	},
  13419	{
  13420		"JMP32_JGE_K: all immediate value magnitudes",
  13421		{ },
  13422		INTERNAL | FLAG_NO_DATA,
  13423		{ },
  13424		{ { 0, 1 } },
  13425		.fill_helper = bpf_fill_jmp32_jge_imm,
  13426		.nr_testruns = NR_PATTERN_RUNS,
  13427	},
  13428	{
  13429		"JMP32_JLT_K: all immediate value magnitudes",
  13430		{ },
  13431		INTERNAL | FLAG_NO_DATA,
  13432		{ },
  13433		{ { 0, 1 } },
  13434		.fill_helper = bpf_fill_jmp32_jlt_imm,
  13435		.nr_testruns = NR_PATTERN_RUNS,
  13436	},
  13437	{
  13438		"JMP32_JLE_K: all immediate value magnitudes",
  13439		{ },
  13440		INTERNAL | FLAG_NO_DATA,
  13441		{ },
  13442		{ { 0, 1 } },
  13443		.fill_helper = bpf_fill_jmp32_jle_imm,
  13444		.nr_testruns = NR_PATTERN_RUNS,
  13445	},
  13446	{
  13447		"JMP32_JSGT_K: all immediate value magnitudes",
  13448		{ },
  13449		INTERNAL | FLAG_NO_DATA,
  13450		{ },
  13451		{ { 0, 1 } },
  13452		.fill_helper = bpf_fill_jmp32_jsgt_imm,
  13453		.nr_testruns = NR_PATTERN_RUNS,
  13454	},
  13455	{
  13456		"JMP32_JSGE_K: all immediate value magnitudes",
  13457		{ },
  13458		INTERNAL | FLAG_NO_DATA,
  13459		{ },
  13460		{ { 0, 1 } },
  13461		.fill_helper = bpf_fill_jmp32_jsge_imm,
  13462		.nr_testruns = NR_PATTERN_RUNS,
  13463	},
  13464	{
  13465		"JMP32_JSLT_K: all immediate value magnitudes",
  13466		{ },
  13467		INTERNAL | FLAG_NO_DATA,
  13468		{ },
  13469		{ { 0, 1 } },
  13470		.fill_helper = bpf_fill_jmp32_jslt_imm,
  13471		.nr_testruns = NR_PATTERN_RUNS,
  13472	},
  13473	{
  13474		"JMP32_JSLE_K: all immediate value magnitudes",
  13475		{ },
  13476		INTERNAL | FLAG_NO_DATA,
  13477		{ },
  13478		{ { 0, 1 } },
  13479		.fill_helper = bpf_fill_jmp32_jsle_imm,
  13480		.nr_testruns = NR_PATTERN_RUNS,
  13481	},
  13482	/* JMP32 register magnitudes */
  13483	{
  13484		"JMP32_JSET_X: all register value magnitudes",
  13485		{ },
  13486		INTERNAL | FLAG_NO_DATA,
  13487		{ },
  13488		{ { 0, 1 } },
  13489		.fill_helper = bpf_fill_jmp32_jset_reg,
  13490		.nr_testruns = NR_PATTERN_RUNS,
  13491	},
  13492	{
  13493		"JMP32_JEQ_X: all register value magnitudes",
  13494		{ },
  13495		INTERNAL | FLAG_NO_DATA,
  13496		{ },
  13497		{ { 0, 1 } },
  13498		.fill_helper = bpf_fill_jmp32_jeq_reg,
  13499		.nr_testruns = NR_PATTERN_RUNS,
  13500	},
  13501	{
  13502		"JMP32_JNE_X: all register value magnitudes",
  13503		{ },
  13504		INTERNAL | FLAG_NO_DATA,
  13505		{ },
  13506		{ { 0, 1 } },
  13507		.fill_helper = bpf_fill_jmp32_jne_reg,
  13508		.nr_testruns = NR_PATTERN_RUNS,
  13509	},
  13510	{
  13511		"JMP32_JGT_X: all register value magnitudes",
  13512		{ },
  13513		INTERNAL | FLAG_NO_DATA,
  13514		{ },
  13515		{ { 0, 1 } },
  13516		.fill_helper = bpf_fill_jmp32_jgt_reg,
  13517		.nr_testruns = NR_PATTERN_RUNS,
  13518	},
  13519	{
  13520		"JMP32_JGE_X: all register value magnitudes",
  13521		{ },
  13522		INTERNAL | FLAG_NO_DATA,
  13523		{ },
  13524		{ { 0, 1 } },
  13525		.fill_helper = bpf_fill_jmp32_jge_reg,
  13526		.nr_testruns = NR_PATTERN_RUNS,
  13527	},
  13528	{
  13529		"JMP32_JLT_X: all register value magnitudes",
  13530		{ },
  13531		INTERNAL | FLAG_NO_DATA,
  13532		{ },
  13533		{ { 0, 1 } },
  13534		.fill_helper = bpf_fill_jmp32_jlt_reg,
  13535		.nr_testruns = NR_PATTERN_RUNS,
  13536	},
  13537	{
  13538		"JMP32_JLE_X: all register value magnitudes",
  13539		{ },
  13540		INTERNAL | FLAG_NO_DATA,
  13541		{ },
  13542		{ { 0, 1 } },
  13543		.fill_helper = bpf_fill_jmp32_jle_reg,
  13544		.nr_testruns = NR_PATTERN_RUNS,
  13545	},
  13546	{
  13547		"JMP32_JSGT_X: all register value magnitudes",
  13548		{ },
  13549		INTERNAL | FLAG_NO_DATA,
  13550		{ },
  13551		{ { 0, 1 } },
  13552		.fill_helper = bpf_fill_jmp32_jsgt_reg,
  13553		.nr_testruns = NR_PATTERN_RUNS,
  13554	},
  13555	{
  13556		"JMP32_JSGE_X: all register value magnitudes",
  13557		{ },
  13558		INTERNAL | FLAG_NO_DATA,
  13559		{ },
  13560		{ { 0, 1 } },
  13561		.fill_helper = bpf_fill_jmp32_jsge_reg,
  13562		.nr_testruns = NR_PATTERN_RUNS,
  13563	},
  13564	{
  13565		"JMP32_JSLT_X: all register value magnitudes",
  13566		{ },
  13567		INTERNAL | FLAG_NO_DATA,
  13568		{ },
  13569		{ { 0, 1 } },
  13570		.fill_helper = bpf_fill_jmp32_jslt_reg,
  13571		.nr_testruns = NR_PATTERN_RUNS,
  13572	},
  13573	{
  13574		"JMP32_JSLE_X: all register value magnitudes",
  13575		{ },
  13576		INTERNAL | FLAG_NO_DATA,
  13577		{ },
  13578		{ { 0, 1 } },
  13579		.fill_helper = bpf_fill_jmp32_jsle_reg,
  13580		.nr_testruns = NR_PATTERN_RUNS,
  13581	},
  13582	/* Conditional jumps with constant decision */
  13583	{
  13584		"JMP_JSET_K: imm = 0 -> never taken",
  13585		.u.insns_int = {
  13586			BPF_ALU64_IMM(BPF_MOV, R0, 1),
  13587			BPF_JMP_IMM(BPF_JSET, R1, 0, 1),
  13588			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13589			BPF_EXIT_INSN(),
  13590		},
  13591		INTERNAL | FLAG_NO_DATA,
  13592		{ },
  13593		{ { 0, 0 } },
  13594	},
  13595	{
  13596		"JMP_JLT_K: imm = 0 -> never taken",
  13597		.u.insns_int = {
  13598			BPF_ALU64_IMM(BPF_MOV, R0, 1),
  13599			BPF_JMP_IMM(BPF_JLT, R1, 0, 1),
  13600			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13601			BPF_EXIT_INSN(),
  13602		},
  13603		INTERNAL | FLAG_NO_DATA,
  13604		{ },
  13605		{ { 0, 0 } },
  13606	},
  13607	{
  13608		"JMP_JGE_K: imm = 0 -> always taken",
  13609		.u.insns_int = {
  13610			BPF_ALU64_IMM(BPF_MOV, R0, 1),
  13611			BPF_JMP_IMM(BPF_JGE, R1, 0, 1),
  13612			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13613			BPF_EXIT_INSN(),
  13614		},
  13615		INTERNAL | FLAG_NO_DATA,
  13616		{ },
  13617		{ { 0, 1 } },
  13618	},
  13619	{
  13620		"JMP_JGT_K: imm = 0xffffffff -> never taken",
  13621		.u.insns_int = {
  13622			BPF_ALU64_IMM(BPF_MOV, R0, 1),
  13623			BPF_JMP_IMM(BPF_JGT, R1, U32_MAX, 1),
  13624			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13625			BPF_EXIT_INSN(),
  13626		},
  13627		INTERNAL | FLAG_NO_DATA,
  13628		{ },
  13629		{ { 0, 0 } },
  13630	},
  13631	{
  13632		"JMP_JLE_K: imm = 0xffffffff -> always taken",
  13633		.u.insns_int = {
  13634			BPF_ALU64_IMM(BPF_MOV, R0, 1),
  13635			BPF_JMP_IMM(BPF_JLE, R1, U32_MAX, 1),
  13636			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13637			BPF_EXIT_INSN(),
  13638		},
  13639		INTERNAL | FLAG_NO_DATA,
  13640		{ },
  13641		{ { 0, 1 } },
  13642	},
  13643	{
  13644		"JMP32_JSGT_K: imm = 0x7fffffff -> never taken",
  13645		.u.insns_int = {
  13646			BPF_ALU64_IMM(BPF_MOV, R0, 1),
  13647			BPF_JMP32_IMM(BPF_JSGT, R1, S32_MAX, 1),
  13648			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13649			BPF_EXIT_INSN(),
  13650		},
  13651		INTERNAL | FLAG_NO_DATA,
  13652		{ },
  13653		{ { 0, 0 } },
  13654	},
  13655	{
  13656		"JMP32_JSGE_K: imm = -0x80000000 -> always taken",
  13657		.u.insns_int = {
  13658			BPF_ALU64_IMM(BPF_MOV, R0, 1),
  13659			BPF_JMP32_IMM(BPF_JSGE, R1, S32_MIN, 1),
  13660			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13661			BPF_EXIT_INSN(),
  13662		},
  13663		INTERNAL | FLAG_NO_DATA,
  13664		{ },
  13665		{ { 0, 1 } },
  13666	},
  13667	{
  13668		"JMP32_JSLT_K: imm = -0x80000000 -> never taken",
  13669		.u.insns_int = {
  13670			BPF_ALU64_IMM(BPF_MOV, R0, 1),
  13671			BPF_JMP32_IMM(BPF_JSLT, R1, S32_MIN, 1),
  13672			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13673			BPF_EXIT_INSN(),
  13674		},
  13675		INTERNAL | FLAG_NO_DATA,
  13676		{ },
  13677		{ { 0, 0 } },
  13678	},
  13679	{
  13680		"JMP32_JSLE_K: imm = 0x7fffffff -> always taken",
  13681		.u.insns_int = {
  13682			BPF_ALU64_IMM(BPF_MOV, R0, 1),
  13683			BPF_JMP32_IMM(BPF_JSLE, R1, S32_MAX, 1),
  13684			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13685			BPF_EXIT_INSN(),
  13686		},
  13687		INTERNAL | FLAG_NO_DATA,
  13688		{ },
  13689		{ { 0, 1 } },
  13690	},
  13691	{
  13692		"JMP_JEQ_X: dst = src -> always taken",
  13693		.u.insns_int = {
  13694			BPF_ALU64_IMM(BPF_MOV, R0, 1),
  13695			BPF_JMP_REG(BPF_JEQ, R1, R1, 1),
  13696			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13697			BPF_EXIT_INSN(),
  13698		},
  13699		INTERNAL | FLAG_NO_DATA,
  13700		{ },
  13701		{ { 0, 1 } },
  13702	},
  13703	{
  13704		"JMP_JGE_X: dst = src -> always taken",
  13705		.u.insns_int = {
  13706			BPF_ALU64_IMM(BPF_MOV, R0, 1),
  13707			BPF_JMP_REG(BPF_JGE, R1, R1, 1),
  13708			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13709			BPF_EXIT_INSN(),
  13710		},
  13711		INTERNAL | FLAG_NO_DATA,
  13712		{ },
  13713		{ { 0, 1 } },
  13714	},
  13715	{
  13716		"JMP_JLE_X: dst = src -> always taken",
  13717		.u.insns_int = {
  13718			BPF_ALU64_IMM(BPF_MOV, R0, 1),
  13719			BPF_JMP_REG(BPF_JLE, R1, R1, 1),
  13720			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13721			BPF_EXIT_INSN(),
  13722		},
  13723		INTERNAL | FLAG_NO_DATA,
  13724		{ },
  13725		{ { 0, 1 } },
  13726	},
  13727	{
  13728		"JMP_JSGE_X: dst = src -> always taken",
  13729		.u.insns_int = {
  13730			BPF_ALU64_IMM(BPF_MOV, R0, 1),
  13731			BPF_JMP_REG(BPF_JSGE, R1, R1, 1),
  13732			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13733			BPF_EXIT_INSN(),
  13734		},
  13735		INTERNAL | FLAG_NO_DATA,
  13736		{ },
  13737		{ { 0, 1 } },
  13738	},
  13739	{
  13740		"JMP_JSLE_X: dst = src -> always taken",
  13741		.u.insns_int = {
  13742			BPF_ALU64_IMM(BPF_MOV, R0, 1),
  13743			BPF_JMP_REG(BPF_JSLE, R1, R1, 1),
  13744			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13745			BPF_EXIT_INSN(),
  13746		},
  13747		INTERNAL | FLAG_NO_DATA,
  13748		{ },
  13749		{ { 0, 1 } },
  13750	},
  13751	{
  13752		"JMP_JNE_X: dst = src -> never taken",
  13753		.u.insns_int = {
  13754			BPF_ALU64_IMM(BPF_MOV, R0, 1),
  13755			BPF_JMP_REG(BPF_JNE, R1, R1, 1),
  13756			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13757			BPF_EXIT_INSN(),
  13758		},
  13759		INTERNAL | FLAG_NO_DATA,
  13760		{ },
  13761		{ { 0, 0 } },
  13762	},
  13763	{
  13764		"JMP_JGT_X: dst = src -> never taken",
  13765		.u.insns_int = {
  13766			BPF_ALU64_IMM(BPF_MOV, R0, 1),
  13767			BPF_JMP_REG(BPF_JGT, R1, R1, 1),
  13768			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13769			BPF_EXIT_INSN(),
  13770		},
  13771		INTERNAL | FLAG_NO_DATA,
  13772		{ },
  13773		{ { 0, 0 } },
  13774	},
  13775	{
  13776		"JMP_JLT_X: dst = src -> never taken",
  13777		.u.insns_int = {
  13778			BPF_ALU64_IMM(BPF_MOV, R0, 1),
  13779			BPF_JMP_REG(BPF_JLT, R1, R1, 1),
  13780			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13781			BPF_EXIT_INSN(),
  13782		},
  13783		INTERNAL | FLAG_NO_DATA,
  13784		{ },
  13785		{ { 0, 0 } },
  13786	},
  13787	{
  13788		"JMP_JSGT_X: dst = src -> never taken",
  13789		.u.insns_int = {
  13790			BPF_ALU64_IMM(BPF_MOV, R0, 1),
  13791			BPF_JMP_REG(BPF_JSGT, R1, R1, 1),
  13792			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13793			BPF_EXIT_INSN(),
  13794		},
  13795		INTERNAL | FLAG_NO_DATA,
  13796		{ },
  13797		{ { 0, 0 } },
  13798	},
  13799	{
  13800		"JMP_JSLT_X: dst = src -> never taken",
  13801		.u.insns_int = {
  13802			BPF_ALU64_IMM(BPF_MOV, R0, 1),
  13803			BPF_JMP_REG(BPF_JSLT, R1, R1, 1),
  13804			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13805			BPF_EXIT_INSN(),
  13806		},
  13807		INTERNAL | FLAG_NO_DATA,
  13808		{ },
  13809		{ { 0, 0 } },
  13810	},
  13811	/* Short relative jumps */
  13812	{
  13813		"Short relative jump: offset=0",
  13814		.u.insns_int = {
  13815			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13816			BPF_JMP_IMM(BPF_JEQ, R0, 0, 0),
  13817			BPF_EXIT_INSN(),
  13818			BPF_ALU32_IMM(BPF_MOV, R0, -1),
  13819		},
  13820		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
  13821		{ },
  13822		{ { 0, 0 } },
  13823	},
  13824	{
  13825		"Short relative jump: offset=1",
  13826		.u.insns_int = {
  13827			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13828			BPF_JMP_IMM(BPF_JEQ, R0, 0, 1),
  13829			BPF_ALU32_IMM(BPF_ADD, R0, 1),
  13830			BPF_EXIT_INSN(),
  13831			BPF_ALU32_IMM(BPF_MOV, R0, -1),
  13832		},
  13833		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
  13834		{ },
  13835		{ { 0, 0 } },
  13836	},
  13837	{
  13838		"Short relative jump: offset=2",
  13839		.u.insns_int = {
  13840			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13841			BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
  13842			BPF_ALU32_IMM(BPF_ADD, R0, 1),
  13843			BPF_ALU32_IMM(BPF_ADD, R0, 1),
  13844			BPF_EXIT_INSN(),
  13845			BPF_ALU32_IMM(BPF_MOV, R0, -1),
  13846		},
  13847		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
  13848		{ },
  13849		{ { 0, 0 } },
  13850	},
  13851	{
  13852		"Short relative jump: offset=3",
  13853		.u.insns_int = {
  13854			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13855			BPF_JMP_IMM(BPF_JEQ, R0, 0, 3),
  13856			BPF_ALU32_IMM(BPF_ADD, R0, 1),
  13857			BPF_ALU32_IMM(BPF_ADD, R0, 1),
  13858			BPF_ALU32_IMM(BPF_ADD, R0, 1),
  13859			BPF_EXIT_INSN(),
  13860			BPF_ALU32_IMM(BPF_MOV, R0, -1),
  13861		},
  13862		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
  13863		{ },
  13864		{ { 0, 0 } },
  13865	},
  13866	{
  13867		"Short relative jump: offset=4",
  13868		.u.insns_int = {
  13869			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  13870			BPF_JMP_IMM(BPF_JEQ, R0, 0, 4),
  13871			BPF_ALU32_IMM(BPF_ADD, R0, 1),
  13872			BPF_ALU32_IMM(BPF_ADD, R0, 1),
  13873			BPF_ALU32_IMM(BPF_ADD, R0, 1),
  13874			BPF_ALU32_IMM(BPF_ADD, R0, 1),
  13875			BPF_EXIT_INSN(),
  13876			BPF_ALU32_IMM(BPF_MOV, R0, -1),
  13877		},
  13878		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
  13879		{ },
  13880		{ { 0, 0 } },
  13881	},
  13882	/* Conditional branch conversions */
  13883	{
  13884		"Long conditional jump: taken at runtime",
  13885		{ },
  13886		INTERNAL | FLAG_NO_DATA,
  13887		{ },
  13888		{ { 0, 1 } },
  13889		.fill_helper = bpf_fill_max_jmp_taken,
  13890	},
  13891	{
  13892		"Long conditional jump: not taken at runtime",
  13893		{ },
  13894		INTERNAL | FLAG_NO_DATA,
  13895		{ },
  13896		{ { 0, 2 } },
  13897		.fill_helper = bpf_fill_max_jmp_not_taken,
  13898	},
  13899	{
  13900		"Long conditional jump: always taken, known at JIT time",
  13901		{ },
  13902		INTERNAL | FLAG_NO_DATA,
  13903		{ },
  13904		{ { 0, 1 } },
  13905		.fill_helper = bpf_fill_max_jmp_always_taken,
  13906	},
  13907	{
  13908		"Long conditional jump: never taken, known at JIT time",
  13909		{ },
  13910		INTERNAL | FLAG_NO_DATA,
  13911		{ },
  13912		{ { 0, 2 } },
  13913		.fill_helper = bpf_fill_max_jmp_never_taken,
  13914	},
  13915	/* Staggered jump sequences, immediate */
  13916	{
  13917		"Staggered jumps: JMP_JA",
  13918		{ },
  13919		INTERNAL | FLAG_NO_DATA,
  13920		{ },
  13921		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13922		.fill_helper = bpf_fill_staggered_ja,
  13923		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  13924	},
  13925	{
  13926		"Staggered jumps: JMP_JEQ_K",
  13927		{ },
  13928		INTERNAL | FLAG_NO_DATA,
  13929		{ },
  13930		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13931		.fill_helper = bpf_fill_staggered_jeq_imm,
  13932		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  13933	},
  13934	{
  13935		"Staggered jumps: JMP_JNE_K",
  13936		{ },
  13937		INTERNAL | FLAG_NO_DATA,
  13938		{ },
  13939		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13940		.fill_helper = bpf_fill_staggered_jne_imm,
  13941		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  13942	},
  13943	{
  13944		"Staggered jumps: JMP_JSET_K",
  13945		{ },
  13946		INTERNAL | FLAG_NO_DATA,
  13947		{ },
  13948		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13949		.fill_helper = bpf_fill_staggered_jset_imm,
  13950		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  13951	},
  13952	{
  13953		"Staggered jumps: JMP_JGT_K",
  13954		{ },
  13955		INTERNAL | FLAG_NO_DATA,
  13956		{ },
  13957		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13958		.fill_helper = bpf_fill_staggered_jgt_imm,
  13959		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  13960	},
  13961	{
  13962		"Staggered jumps: JMP_JGE_K",
  13963		{ },
  13964		INTERNAL | FLAG_NO_DATA,
  13965		{ },
  13966		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13967		.fill_helper = bpf_fill_staggered_jge_imm,
  13968		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  13969	},
  13970	{
  13971		"Staggered jumps: JMP_JLT_K",
  13972		{ },
  13973		INTERNAL | FLAG_NO_DATA,
  13974		{ },
  13975		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13976		.fill_helper = bpf_fill_staggered_jlt_imm,
  13977		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  13978	},
  13979	{
  13980		"Staggered jumps: JMP_JLE_K",
  13981		{ },
  13982		INTERNAL | FLAG_NO_DATA,
  13983		{ },
  13984		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13985		.fill_helper = bpf_fill_staggered_jle_imm,
  13986		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  13987	},
  13988	{
  13989		"Staggered jumps: JMP_JSGT_K",
  13990		{ },
  13991		INTERNAL | FLAG_NO_DATA,
  13992		{ },
  13993		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  13994		.fill_helper = bpf_fill_staggered_jsgt_imm,
  13995		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  13996	},
  13997	{
  13998		"Staggered jumps: JMP_JSGE_K",
  13999		{ },
  14000		INTERNAL | FLAG_NO_DATA,
  14001		{ },
  14002		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14003		.fill_helper = bpf_fill_staggered_jsge_imm,
  14004		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14005	},
  14006	{
  14007		"Staggered jumps: JMP_JSLT_K",
  14008		{ },
  14009		INTERNAL | FLAG_NO_DATA,
  14010		{ },
  14011		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14012		.fill_helper = bpf_fill_staggered_jslt_imm,
  14013		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14014	},
  14015	{
  14016		"Staggered jumps: JMP_JSLE_K",
  14017		{ },
  14018		INTERNAL | FLAG_NO_DATA,
  14019		{ },
  14020		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14021		.fill_helper = bpf_fill_staggered_jsle_imm,
  14022		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14023	},
  14024	/* Staggered jump sequences, register */
  14025	{
  14026		"Staggered jumps: JMP_JEQ_X",
  14027		{ },
  14028		INTERNAL | FLAG_NO_DATA,
  14029		{ },
  14030		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14031		.fill_helper = bpf_fill_staggered_jeq_reg,
  14032		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14033	},
  14034	{
  14035		"Staggered jumps: JMP_JNE_X",
  14036		{ },
  14037		INTERNAL | FLAG_NO_DATA,
  14038		{ },
  14039		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14040		.fill_helper = bpf_fill_staggered_jne_reg,
  14041		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14042	},
  14043	{
  14044		"Staggered jumps: JMP_JSET_X",
  14045		{ },
  14046		INTERNAL | FLAG_NO_DATA,
  14047		{ },
  14048		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14049		.fill_helper = bpf_fill_staggered_jset_reg,
  14050		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14051	},
  14052	{
  14053		"Staggered jumps: JMP_JGT_X",
  14054		{ },
  14055		INTERNAL | FLAG_NO_DATA,
  14056		{ },
  14057		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14058		.fill_helper = bpf_fill_staggered_jgt_reg,
  14059		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14060	},
  14061	{
  14062		"Staggered jumps: JMP_JGE_X",
  14063		{ },
  14064		INTERNAL | FLAG_NO_DATA,
  14065		{ },
  14066		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14067		.fill_helper = bpf_fill_staggered_jge_reg,
  14068		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14069	},
  14070	{
  14071		"Staggered jumps: JMP_JLT_X",
  14072		{ },
  14073		INTERNAL | FLAG_NO_DATA,
  14074		{ },
  14075		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14076		.fill_helper = bpf_fill_staggered_jlt_reg,
  14077		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14078	},
  14079	{
  14080		"Staggered jumps: JMP_JLE_X",
  14081		{ },
  14082		INTERNAL | FLAG_NO_DATA,
  14083		{ },
  14084		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14085		.fill_helper = bpf_fill_staggered_jle_reg,
  14086		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14087	},
  14088	{
  14089		"Staggered jumps: JMP_JSGT_X",
  14090		{ },
  14091		INTERNAL | FLAG_NO_DATA,
  14092		{ },
  14093		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14094		.fill_helper = bpf_fill_staggered_jsgt_reg,
  14095		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14096	},
  14097	{
  14098		"Staggered jumps: JMP_JSGE_X",
  14099		{ },
  14100		INTERNAL | FLAG_NO_DATA,
  14101		{ },
  14102		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14103		.fill_helper = bpf_fill_staggered_jsge_reg,
  14104		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14105	},
  14106	{
  14107		"Staggered jumps: JMP_JSLT_X",
  14108		{ },
  14109		INTERNAL | FLAG_NO_DATA,
  14110		{ },
  14111		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14112		.fill_helper = bpf_fill_staggered_jslt_reg,
  14113		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14114	},
  14115	{
  14116		"Staggered jumps: JMP_JSLE_X",
  14117		{ },
  14118		INTERNAL | FLAG_NO_DATA,
  14119		{ },
  14120		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14121		.fill_helper = bpf_fill_staggered_jsle_reg,
  14122		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14123	},
  14124	/* Staggered jump sequences, JMP32 immediate */
  14125	{
  14126		"Staggered jumps: JMP32_JEQ_K",
  14127		{ },
  14128		INTERNAL | FLAG_NO_DATA,
  14129		{ },
  14130		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14131		.fill_helper = bpf_fill_staggered_jeq32_imm,
  14132		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14133	},
  14134	{
  14135		"Staggered jumps: JMP32_JNE_K",
  14136		{ },
  14137		INTERNAL | FLAG_NO_DATA,
  14138		{ },
  14139		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14140		.fill_helper = bpf_fill_staggered_jne32_imm,
  14141		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14142	},
  14143	{
  14144		"Staggered jumps: JMP32_JSET_K",
  14145		{ },
  14146		INTERNAL | FLAG_NO_DATA,
  14147		{ },
  14148		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14149		.fill_helper = bpf_fill_staggered_jset32_imm,
  14150		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14151	},
  14152	{
  14153		"Staggered jumps: JMP32_JGT_K",
  14154		{ },
  14155		INTERNAL | FLAG_NO_DATA,
  14156		{ },
  14157		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14158		.fill_helper = bpf_fill_staggered_jgt32_imm,
  14159		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14160	},
  14161	{
  14162		"Staggered jumps: JMP32_JGE_K",
  14163		{ },
  14164		INTERNAL | FLAG_NO_DATA,
  14165		{ },
  14166		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14167		.fill_helper = bpf_fill_staggered_jge32_imm,
  14168		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14169	},
  14170	{
  14171		"Staggered jumps: JMP32_JLT_K",
  14172		{ },
  14173		INTERNAL | FLAG_NO_DATA,
  14174		{ },
  14175		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14176		.fill_helper = bpf_fill_staggered_jlt32_imm,
  14177		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14178	},
  14179	{
  14180		"Staggered jumps: JMP32_JLE_K",
  14181		{ },
  14182		INTERNAL | FLAG_NO_DATA,
  14183		{ },
  14184		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14185		.fill_helper = bpf_fill_staggered_jle32_imm,
  14186		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14187	},
  14188	{
  14189		"Staggered jumps: JMP32_JSGT_K",
  14190		{ },
  14191		INTERNAL | FLAG_NO_DATA,
  14192		{ },
  14193		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14194		.fill_helper = bpf_fill_staggered_jsgt32_imm,
  14195		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14196	},
  14197	{
  14198		"Staggered jumps: JMP32_JSGE_K",
  14199		{ },
  14200		INTERNAL | FLAG_NO_DATA,
  14201		{ },
  14202		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14203		.fill_helper = bpf_fill_staggered_jsge32_imm,
  14204		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14205	},
  14206	{
  14207		"Staggered jumps: JMP32_JSLT_K",
  14208		{ },
  14209		INTERNAL | FLAG_NO_DATA,
  14210		{ },
  14211		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14212		.fill_helper = bpf_fill_staggered_jslt32_imm,
  14213		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14214	},
  14215	{
  14216		"Staggered jumps: JMP32_JSLE_K",
  14217		{ },
  14218		INTERNAL | FLAG_NO_DATA,
  14219		{ },
  14220		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14221		.fill_helper = bpf_fill_staggered_jsle32_imm,
  14222		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14223	},
  14224	/* Staggered jump sequences, JMP32 register */
  14225	{
  14226		"Staggered jumps: JMP32_JEQ_X",
  14227		{ },
  14228		INTERNAL | FLAG_NO_DATA,
  14229		{ },
  14230		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14231		.fill_helper = bpf_fill_staggered_jeq32_reg,
  14232		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14233	},
  14234	{
  14235		"Staggered jumps: JMP32_JNE_X",
  14236		{ },
  14237		INTERNAL | FLAG_NO_DATA,
  14238		{ },
  14239		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14240		.fill_helper = bpf_fill_staggered_jne32_reg,
  14241		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14242	},
  14243	{
  14244		"Staggered jumps: JMP32_JSET_X",
  14245		{ },
  14246		INTERNAL | FLAG_NO_DATA,
  14247		{ },
  14248		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14249		.fill_helper = bpf_fill_staggered_jset32_reg,
  14250		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14251	},
  14252	{
  14253		"Staggered jumps: JMP32_JGT_X",
  14254		{ },
  14255		INTERNAL | FLAG_NO_DATA,
  14256		{ },
  14257		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14258		.fill_helper = bpf_fill_staggered_jgt32_reg,
  14259		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14260	},
  14261	{
  14262		"Staggered jumps: JMP32_JGE_X",
  14263		{ },
  14264		INTERNAL | FLAG_NO_DATA,
  14265		{ },
  14266		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14267		.fill_helper = bpf_fill_staggered_jge32_reg,
  14268		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14269	},
  14270	{
  14271		"Staggered jumps: JMP32_JLT_X",
  14272		{ },
  14273		INTERNAL | FLAG_NO_DATA,
  14274		{ },
  14275		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14276		.fill_helper = bpf_fill_staggered_jlt32_reg,
  14277		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14278	},
  14279	{
  14280		"Staggered jumps: JMP32_JLE_X",
  14281		{ },
  14282		INTERNAL | FLAG_NO_DATA,
  14283		{ },
  14284		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14285		.fill_helper = bpf_fill_staggered_jle32_reg,
  14286		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14287	},
  14288	{
  14289		"Staggered jumps: JMP32_JSGT_X",
  14290		{ },
  14291		INTERNAL | FLAG_NO_DATA,
  14292		{ },
  14293		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14294		.fill_helper = bpf_fill_staggered_jsgt32_reg,
  14295		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14296	},
  14297	{
  14298		"Staggered jumps: JMP32_JSGE_X",
  14299		{ },
  14300		INTERNAL | FLAG_NO_DATA,
  14301		{ },
  14302		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14303		.fill_helper = bpf_fill_staggered_jsge32_reg,
  14304		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14305	},
  14306	{
  14307		"Staggered jumps: JMP32_JSLT_X",
  14308		{ },
  14309		INTERNAL | FLAG_NO_DATA,
  14310		{ },
  14311		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14312		.fill_helper = bpf_fill_staggered_jslt32_reg,
  14313		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14314	},
  14315	{
  14316		"Staggered jumps: JMP32_JSLE_X",
  14317		{ },
  14318		INTERNAL | FLAG_NO_DATA,
  14319		{ },
  14320		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
  14321		.fill_helper = bpf_fill_staggered_jsle32_reg,
  14322		.nr_testruns = NR_STAGGERED_JMP_RUNS,
  14323	},
  14324};
  14325
  14326static struct net_device dev;
  14327
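       /*
        * Build an skb carrying 'size' bytes of test data and preset the
        * metadata fields (packet type, mark, hash, queue mapping, VLAN and
        * device info) that metadata-reading tests expect to find.
        */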
  14328static struct sk_buff *populate_skb(char *buf, int size)
  14329{
  14330	struct sk_buff *skb;
  14331
  14332	if (size >= MAX_DATA)
  14333		return NULL;
  14334
  14335	skb = alloc_skb(MAX_DATA, GFP_KERNEL);
  14336	if (!skb)
  14337		return NULL;
  14338
  14339	__skb_put_data(skb, buf, size);
  14340
  14341	/* Initialize a fake skb with test pattern. */
  14342	skb_reset_mac_header(skb);
  14343	skb->protocol = htons(ETH_P_IP);
  14344	skb->pkt_type = SKB_TYPE;
  14345	skb->mark = SKB_MARK;
  14346	skb->hash = SKB_HASH;
  14347	skb->queue_mapping = SKB_QUEUE_MAP;
  14348	skb->vlan_tci = SKB_VLAN_TCI;
  14349	skb->vlan_present = SKB_VLAN_PRESENT;
  14350	skb->vlan_proto = htons(ETH_P_IP);
  14351	dev_net_set(&dev, &init_net);
  14352	skb->dev = &dev;
  14353	skb->dev->ifindex = SKB_DEV_IFINDEX;
  14354	skb->dev->type = SKB_DEV_TYPE;
  14355	skb_set_network_header(skb, min(size, ETH_HLEN));
  14356
  14357	return skb;
  14358}
  14359
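       /*
        * Produce the runtime argument for one sub-test: NULL for
        * FLAG_NO_DATA tests, a plain kmalloc() buffer for FLAG_LARGE_MEM
        * tests, otherwise an skb built from test->data, optionally carrying
        * a single page fragment filled from test->frag_data.
        */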
  14360static void *generate_test_data(struct bpf_test *test, int sub)
  14361{
  14362	struct sk_buff *skb;
  14363	struct page *page;
  14364
  14365	if (test->aux & FLAG_NO_DATA)
  14366		return NULL;
  14367
  14368	if (test->aux & FLAG_LARGE_MEM)
  14369		return kmalloc(test->test[sub].data_size, GFP_KERNEL);
  14370
  14371	/* Test case expects an skb, so populate one. Various
  14372	 * subtests generate skbs of different sizes based on
  14373	 * the same data.
  14374	 */
  14375	skb = populate_skb(test->data, test->test[sub].data_size);
  14376	if (!skb)
  14377		return NULL;
  14378
  14379	if (test->aux & FLAG_SKB_FRAG) {
  14380		/*
  14381		 * when the test requires a fragmented skb, add a
  14382		 * single fragment to the skb, filled with
  14383		 * test->frag_data.
  14384		 */
  14385		void *ptr;
  14386
  14387		page = alloc_page(GFP_KERNEL);
  14388
  14389		if (!page)
  14390			goto err_kfree_skb;
  14391
  14392		ptr = kmap(page);
  14393		if (!ptr)
  14394			goto err_free_page;
  14395		memcpy(ptr, test->frag_data, MAX_DATA);
  14396		kunmap(page);
  14397		skb_add_rx_frag(skb, 0, page, 0, MAX_DATA, MAX_DATA);
  14398	}
  14399
  14400	return skb;
  14401
  14402err_free_page:
  14403	__free_page(page);
  14404err_kfree_skb:
  14405	kfree_skb(skb);
  14406	return NULL;
  14407}
  14408
  14409static void release_test_data(const struct bpf_test *test, void *data)
  14410{
  14411	if (test->aux & FLAG_NO_DATA)
  14412		return;
  14413
  14414	if (test->aux & FLAG_LARGE_MEM)
  14415		kfree(data);
  14416	else
  14417		kfree_skb(data);
  14418}
  14419
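       /*
        * Tests built by a fill helper carry an explicit length; tests given
        * as a fixed insns[] array are measured by scanning backwards for the
        * last instruction that is not all zeroes.
        */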
  14420static int filter_length(int which)
  14421{
  14422	struct sock_filter *fp;
  14423	int len;
  14424
  14425	if (tests[which].fill_helper)
  14426		return tests[which].u.ptr.len;
  14427
  14428	fp = tests[which].u.insns;
  14429	for (len = MAX_INSNS - 1; len > 0; --len)
  14430		if (fp[len].code != 0 || fp[len].k != 0)
  14431			break;
  14432
  14433	return len + 1;
  14434}
  14435
  14436static void *filter_pointer(int which)
  14437{
  14438	if (tests[which].fill_helper)
  14439		return tests[which].u.ptr.insns;
  14440	else
  14441		return tests[which].u.insns;
  14442}
  14443
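       /*
        * Construct a runnable program from a test description. CLASSIC
        * filters go through bpf_prog_create() and thus also exercise the
        * classic BPF checker; INTERNAL (eBPF) programs are copied verbatim
        * and handed to bpf_prog_select_runtime(), which picks the
        * interpreter or the JIT.
        */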
  14444static struct bpf_prog *generate_filter(int which, int *err)
  14445{
  14446	__u8 test_type = tests[which].aux & TEST_TYPE_MASK;
  14447	unsigned int flen = filter_length(which);
  14448	void *fptr = filter_pointer(which);
  14449	struct sock_fprog_kern fprog;
  14450	struct bpf_prog *fp;
  14451
  14452	switch (test_type) {
  14453	case CLASSIC:
  14454		fprog.filter = fptr;
  14455		fprog.len = flen;
  14456
  14457		*err = bpf_prog_create(&fp, &fprog);
  14458		if (tests[which].aux & FLAG_EXPECTED_FAIL) {
  14459			if (*err == tests[which].expected_errcode) {
  14460				pr_cont("PASS\n");
  14461				/* Verifier rejected filter as expected. */
  14462				*err = 0;
  14463				return NULL;
  14464			} else {
  14465				pr_cont("UNEXPECTED_PASS\n");
  14466				/* The verifier did not reject the bad filter, or
  14467				 * returned an unexpected error code; just return.
  14468				 */
  14469				*err = -EINVAL;
  14470				return NULL;
  14471			}
  14472		}
  14473		if (*err) {
  14474			pr_cont("FAIL to prog_create err=%d len=%d\n",
  14475				*err, fprog.len);
  14476			return NULL;
  14477		}
  14478		break;
  14479
  14480	case INTERNAL:
  14481		fp = bpf_prog_alloc(bpf_prog_size(flen), 0);
  14482		if (fp == NULL) {
  14483			pr_cont("UNEXPECTED_FAIL no memory left\n");
  14484			*err = -ENOMEM;
  14485			return NULL;
  14486		}
  14487
  14488		fp->len = flen;
  14489		/* Type doesn't really matter here as long as it's not unspec. */
  14490		fp->type = BPF_PROG_TYPE_SOCKET_FILTER;
  14491		memcpy(fp->insnsi, fptr, fp->len * sizeof(struct bpf_insn));
  14492		fp->aux->stack_depth = tests[which].stack_depth;
  14493		fp->aux->verifier_zext = !!(tests[which].aux &
  14494					    FLAG_VERIFIER_ZEXT);
  14495
  14496		/* We cannot error here as we don't need type compatibility
  14497		 * checks.
  14498		 */
  14499		fp = bpf_prog_select_runtime(fp, err);
  14500		if (*err) {
  14501			pr_cont("FAIL to select_runtime err=%d\n", *err);
  14502			return NULL;
  14503		}
  14504		break;
  14505	}
  14506
  14507	*err = 0;
  14508	return fp;
  14509}
  14510
  14511static void release_filter(struct bpf_prog *fp, int which)
  14512{
  14513	__u8 test_type = tests[which].aux & TEST_TYPE_MASK;
  14514
  14515	switch (test_type) {
  14516	case CLASSIC:
  14517		bpf_prog_destroy(fp);
  14518		break;
  14519	case INTERNAL:
  14520		bpf_prog_free(fp);
  14521		break;
  14522	}
  14523}
  14524
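       /*
        * Time 'runs' back-to-back executions of the program on 'data' with
        * migration disabled and report the mean duration per run in
        * nanoseconds; the return value is that of the last execution.
        */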
  14525static int __run_one(const struct bpf_prog *fp, const void *data,
  14526		     int runs, u64 *duration)
  14527{
  14528	u64 start, finish;
  14529	int ret = 0, i;
  14530
  14531	migrate_disable();
  14532	start = ktime_get_ns();
  14533
  14534	for (i = 0; i < runs; i++)
  14535		ret = bpf_prog_run(fp, data);
  14536
  14537	finish = ktime_get_ns();
  14538	migrate_enable();
  14539
  14540	*duration = finish - start;
  14541	do_div(*duration, runs);
  14542
  14543	return ret;
  14544}
  14545
  14546static int run_one(const struct bpf_prog *fp, struct bpf_test *test)
  14547{
  14548	int err_cnt = 0, i, runs = MAX_TESTRUNS;
  14549
  14550	if (test->nr_testruns)
  14551		runs = min(test->nr_testruns, MAX_TESTRUNS);
  14552
  14553	for (i = 0; i < MAX_SUBTESTS; i++) {
  14554		void *data;
  14555		u64 duration;
  14556		u32 ret;
  14557
  14558		/*
  14559		 * NOTE: Several sub-tests may be present, in which case
  14560		 * a zero {data_size, result} tuple indicates the end of
  14561		 * the sub-test array. The first test is always run,
  14562		 * even if both data_size and result happen to be zero.
  14563		 */
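       		/*
       		 * For example (illustrative only), an entry declaring
       		 *	.test = { { 4, 10 }, { 64, 20 } }
       		 * is run twice: once with 4 bytes of data expecting the
       		 * result 10, and once with 64 bytes expecting 20.
       		 */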
  14564		if (i > 0 &&
  14565		    test->test[i].data_size == 0 &&
  14566		    test->test[i].result == 0)
  14567			break;
  14568
  14569		data = generate_test_data(test, i);
  14570		if (!data && !(test->aux & FLAG_NO_DATA)) {
  14571			pr_cont("data generation failed ");
  14572			err_cnt++;
  14573			break;
  14574		}
  14575		ret = __run_one(fp, data, runs, &duration);
  14576		release_test_data(test, data);
  14577
  14578		if (ret == test->test[i].result) {
  14579			pr_cont("%lld ", duration);
  14580		} else {
  14581			pr_cont("ret %d != %d ", ret,
  14582				test->test[i].result);
  14583			err_cnt++;
  14584		}
  14585	}
  14586
  14587	return err_cnt;
  14588}
  14589
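       /*
        * Module parameters for selecting a subset of the suite at load time.
        * test_range limits the test indices that are run (see exclude_test()
        * below); test_name and test_id select a single test and are resolved
        * elsewhere in this module. Illustrative invocations:
        *
        *	modprobe test_bpf test_range=0,99
        *	modprobe test_bpf test_name="Staggered jumps: JMP_JA"
        */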
  14590static char test_name[64];
  14591module_param_string(test_name, test_name, sizeof(test_name), 0);
  14592
  14593static int test_id = -1;
  14594module_param(test_id, int, 0);
  14595
  14596static int test_range[2] = { 0, INT_MAX };
  14597module_param_array(test_range, int, NULL, 0);
  14598
  14599static bool exclude_test(int test_id)
  14600{
  14601	return test_id < test_range[0] || test_id > test_range[1];
  14602}
  14603
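       /*
        * Build a GSO skb for the skb_segment() tests below: two skbs, each
        * with a page fragment attached, where the second is linked into the
        * first one's frag_list and the first carries the GSO metadata.
        */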
  14604static __init struct sk_buff *build_test_skb(void)
  14605{
  14606	u32 headroom = NET_SKB_PAD + NET_IP_ALIGN + ETH_HLEN;
  14607	struct sk_buff *skb[2];
  14608	struct page *page[2];
  14609	int i, data_size = 8;
  14610
  14611	for (i = 0; i < 2; i++) {
  14612		page[i] = alloc_page(GFP_KERNEL);
  14613		if (!page[i]) {
  14614			if (i == 0)
  14615				goto err_page0;
  14616			else
  14617				goto err_page1;
  14618		}
  14619
  14620		/* this will set skb[i]->head_frag */
  14621		skb[i] = dev_alloc_skb(headroom + data_size);
  14622		if (!skb[i]) {
  14623			if (i == 0)
  14624				goto err_skb0;
  14625			else
  14626				goto err_skb1;
  14627		}
  14628
  14629		skb_reserve(skb[i], headroom);
  14630		skb_put(skb[i], data_size);
  14631		skb[i]->protocol = htons(ETH_P_IP);
  14632		skb_reset_network_header(skb[i]);
  14633		skb_set_mac_header(skb[i], -ETH_HLEN);
  14634
  14635		skb_add_rx_frag(skb[i], 0, page[i], 0, 64, 64);
  14636		/* skb_headlen(skb[i]): 8, skb[i]->head_frag = 1 */
  14637	}
  14638
  14639	/* setup shinfo */
  14640	skb_shinfo(skb[0])->gso_size = 1448;
  14641	skb_shinfo(skb[0])->gso_type = SKB_GSO_TCPV4;
  14642	skb_shinfo(skb[0])->gso_type |= SKB_GSO_DODGY;
  14643	skb_shinfo(skb[0])->gso_segs = 0;
  14644	skb_shinfo(skb[0])->frag_list = skb[1];
  14645	skb_shinfo(skb[0])->hwtstamps.hwtstamp = 1000;
  14646
  14647	/* adjust skb[0]'s len */
  14648	skb[0]->len += skb[1]->len;
  14649	skb[0]->data_len += skb[1]->data_len;
  14650	skb[0]->truesize += skb[1]->truesize;
  14651
  14652	return skb[0];
  14653
  14654err_skb1:
  14655	__free_page(page[1]);
  14656err_page1:
  14657	kfree_skb(skb[0]);
  14658err_skb0:
  14659	__free_page(page[0]);
  14660err_page0:
  14661	return NULL;
  14662}
  14663
  14664static __init struct sk_buff *build_test_skb_linear_no_head_frag(void)
  14665{
  14666	unsigned int alloc_size = 2000;
  14667	unsigned int headroom = 102, doffset = 72, data_size = 1308;
  14668	struct sk_buff *skb[2];
  14669	int i;
  14670
  14671	/* skbs linked in a frag_list, both with linear data, with head_frag=0
  14672	 * (data allocated by kmalloc), both have tcp data of 1308 bytes
  14673	 * (total payload is 2616 bytes).
  14674	 * Data offset is 72 bytes (40 ipv6 hdr, 32 tcp hdr). Some headroom.
  14675	 */
  14676	for (i = 0; i < 2; i++) {
  14677		skb[i] = alloc_skb(alloc_size, GFP_KERNEL);
  14678		if (!skb[i]) {
  14679			if (i == 0)
  14680				goto err_skb0;
  14681			else
  14682				goto err_skb1;
  14683		}
  14684
  14685		skb[i]->protocol = htons(ETH_P_IPV6);
  14686		skb_reserve(skb[i], headroom);
  14687		skb_put(skb[i], doffset + data_size);
  14688		skb_reset_network_header(skb[i]);
  14689		if (i == 0)
  14690			skb_reset_mac_header(skb[i]);
  14691		else
  14692			skb_set_mac_header(skb[i], -ETH_HLEN);
  14693		__skb_pull(skb[i], doffset);
  14694	}
  14695
  14696	/* setup shinfo.
  14697	 * mimic bpf_skb_proto_4_to_6, which resets gso_segs and assigns a
  14698	 * reduced gso_size.
  14699	 */
  14700	skb_shinfo(skb[0])->gso_size = 1288;
  14701	skb_shinfo(skb[0])->gso_type = SKB_GSO_TCPV6 | SKB_GSO_DODGY;
  14702	skb_shinfo(skb[0])->gso_segs = 0;
  14703	skb_shinfo(skb[0])->frag_list = skb[1];
  14704
  14705	/* adjust skb[0]'s len */
  14706	skb[0]->len += skb[1]->len;
  14707	skb[0]->data_len += skb[1]->len;
  14708	skb[0]->truesize += skb[1]->truesize;
  14709
  14710	return skb[0];
  14711
  14712err_skb1:
  14713	kfree_skb(skb[0]);
  14714err_skb0:
  14715	return NULL;
  14716}
  14717
  14718struct skb_segment_test {
  14719	const char *descr;
  14720	struct sk_buff *(*build_skb)(void);
  14721	netdev_features_t features;
  14722};
  14723
  14724static struct skb_segment_test skb_segment_tests[] __initconst = {
  14725	{
  14726		.descr = "gso_with_rx_frags",
  14727		.build_skb = build_test_skb,
  14728		.features = NETIF_F_SG | NETIF_F_GSO_PARTIAL | NETIF_F_IP_CSUM |
  14729			    NETIF_F_IPV6_CSUM | NETIF_F_RXCSUM
  14730	},
  14731	{
  14732		.descr = "gso_linear_no_head_frag",
  14733		.build_skb = build_test_skb_linear_no_head_frag,
  14734		.features = NETIF_F_SG | NETIF_F_FRAGLIST |
  14735			    NETIF_F_HW_VLAN_CTAG_TX | NETIF_F_GSO |
  14736			    NETIF_F_LLTX_BIT | NETIF_F_GRO |
  14737			    NETIF_F_IPV6_CSUM | NETIF_F_RXCSUM |
  14738			    NETIF_F_HW_VLAN_STAG_TX_BIT
  14739	}
  14740};
  14741
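       /*
        * Run a single skb_segment() test: build the skb, try to segment it
        * with the given feature set, and return 0 iff skb_segment() did not
        * return an error pointer.
        */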
  14742static __init int test_skb_segment_single(const struct skb_segment_test *test)
  14743{
  14744	struct sk_buff *skb, *segs;
  14745	int ret = -1;
  14746
  14747	skb = test->build_skb();
  14748	if (!skb) {
  14749		pr_info("%s: failed to build test skb", __func__);
  14750		goto done;
  14751	}
  14752
  14753	segs = skb_segment(skb, test->features);
  14754	if (!IS_ERR(segs)) {
  14755		kfree_skb_list(segs);
  14756		ret = 0;
  14757	}
  14758	kfree_skb(skb);
  14759done:
  14760	return ret;
  14761}
  14762
  14763static __init int test_skb_segment(void)
  14764{
  14765	int i, err_cnt = 0, pass_cnt = 0;
  14766
  14767	for (i = 0; i < ARRAY_SIZE(skb_segment_tests); i++) {
  14768		const struct skb_segment_test *test = &skb_segment_tests[i];
  14769
  14770		cond_resched();
  14771		if (exclude_test(i))
  14772			continue;
  14773
  14774		pr_info("#%d %s ", i, test->descr);
  14775
  14776		if (test_skb_segment_single(test)) {
  14777			pr_cont("FAIL\n");
  14778			err_cnt++;
  14779		} else {
  14780			pr_cont("PASS\n");
  14781			pass_cnt++;
  14782		}
  14783	}
  14784
  14785	pr_info("%s: Summary: %d PASSED, %d FAILED\n", __func__,
  14786		pass_cnt, err_cnt);
  14787	return err_cnt ? -EINVAL : 0;
  14788}
  14789
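       /*
        * Main driver for the tests[] table: build each eligible program
        * (invoking its fill helper if present), run it, and print a per-test
        * line plus a summary that includes how many programs were JITed.
        */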
  14790static __init int test_bpf(void)
  14791{
  14792	int i, err_cnt = 0, pass_cnt = 0;
  14793	int jit_cnt = 0, run_cnt = 0;
  14794
  14795	for (i = 0; i < ARRAY_SIZE(tests); i++) {
  14796		struct bpf_prog *fp;
  14797		int err;
  14798
  14799		cond_resched();
  14800		if (exclude_test(i))
  14801			continue;
  14802
  14803		pr_info("#%d %s ", i, tests[i].descr);
  14804
  14805		if (tests[i].fill_helper &&
  14806		    tests[i].fill_helper(&tests[i]) < 0) {
  14807			pr_cont("FAIL to prog_fill\n");
  14808			continue;
  14809		}
  14810
  14811		fp = generate_filter(i, &err);
  14812
  14813		if (tests[i].fill_helper) {
  14814			kfree(tests[i].u.ptr.insns);
  14815			tests[i].u.ptr.insns = NULL;
  14816		}
  14817
  14818		if (fp == NULL) {
  14819			if (err == 0) {
  14820				pass_cnt++;
  14821				continue;
  14822			}
  14823			err_cnt++;
  14824			continue;
  14825		}
  14826
  14827		pr_cont("jited:%u ", fp->jited);
  14828
  14829		run_cnt++;
  14830		if (fp->jited)
  14831			jit_cnt++;
  14832
  14833		err = run_one(fp, &tests[i]);
  14834		release_filter(fp, i);
  14835
  14836		if (err) {
  14837			pr_cont("FAIL (%d times)\n", err);
  14838			err_cnt++;
  14839		} else {
  14840			pr_cont("PASS\n");
  14841			pass_cnt++;
  14842		}
  14843	}
  14844
  14845	pr_info("Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
  14846		pass_cnt, err_cnt, jit_cnt, run_cnt);
  14847
  14848	return err_cnt ? -EINVAL : 0;
  14849}
  14850
  14851struct tail_call_test {
  14852	const char *descr;
  14853	struct bpf_insn insns[MAX_INSNS];
  14854	int flags;
  14855	int result;
  14856	int stack_depth;
  14857};
  14858
  14859/* Flags that can be passed to tail call test cases */
  14860#define FLAG_NEED_STATE		BIT(0)
  14861#define FLAG_RESULT_IN_STATE	BIT(1)
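       	/*
       	 * FLAG_NEED_STATE passes a pointer to a local int as the program
       	 * context; FLAG_RESULT_IN_STATE makes the runner compare that state
       	 * value, rather than the program's return value, against the expected
       	 * result (see test_tail_calls() below).
       	 */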
  14862
  14863/*
  14864 * Magic marker used in test snippets for tail calls below.
  14865	 * BPF_LD/MOV to R2 and R3 with this immediate value is replaced
  14866 * with the proper values by the test runner.
  14867 */
  14868#define TAIL_CALL_MARKER 0x7a11ca11
  14869
  14870/* Special offset to indicate a NULL call target */
  14871#define TAIL_CALL_NULL 0x7fff
  14872
  14873/* Special offset to indicate an out-of-range index */
  14874#define TAIL_CALL_INVALID 0x7ffe
  14875
  14876#define TAIL_CALL(offset)			       \
  14877	BPF_LD_IMM64(R2, TAIL_CALL_MARKER),	       \
  14878	BPF_RAW_INSN(BPF_ALU | BPF_MOV | BPF_K, R3, 0, \
  14879		     offset, TAIL_CALL_MARKER),	       \
  14880	BPF_JMP_IMM(BPF_TAIL_CALL, 0, 0, 0)
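       	/*
       	 * Illustration: TAIL_CALL(-1) emits an LD_IMM64 of the marker into R2,
       	 * a 32-bit MOV of the marker into R3 with off = -1, and the tail call
       	 * itself. prepare_tail_call_tests() later patches the first marker with
       	 * the address of the bpf_array holding the test programs, and the
       	 * second with the absolute program index (calling test index + offset,
       	 * or a NULL/out-of-range index for the special offsets above).
       	 */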
  14881
  14882/*
  14883 * A test function to be called from a BPF program, clobbering a lot of
  14884 * CPU registers in the process. A JITed BPF program calling this function
  14885 * must save and restore any caller-saved registers it uses for internal
  14886 * state, for example the current tail call count.
  14887 */
  14888BPF_CALL_1(bpf_test_func, u64, arg)
  14889{
  14890	char buf[64];
  14891	long a = 0;
  14892	long b = 1;
  14893	long c = 2;
  14894	long d = 3;
  14895	long e = 4;
  14896	long f = 5;
  14897	long g = 6;
  14898	long h = 7;
  14899
  14900	return snprintf(buf, sizeof(buf),
  14901			"%ld %lu %lx %ld %lu %lx %ld %lu %x",
  14902			a, b, c, d, e, f, g, h, (int)arg);
  14903}
  14904#define BPF_FUNC_test_func __BPF_FUNC_MAX_ID
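       	/*
       	 * Pseudo helper ID outside the range of real BPF helpers; it is only
       	 * meaningful to this module, which resolves calls to it to the address
       	 * of bpf_test_func() in prepare_tail_call_tests().
       	 */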
  14905
  14906/*
  14907 * Tail call tests. Each test case may call any other test in the table,
  14908 * including itself, specified as a relative index offset from the calling
  14909 * test. The index TAIL_CALL_NULL can be used to specify a NULL target
  14910 * function to test the JIT error path. Similarly, the index TAIL_CALL_INVALID
  14911 * results in a target index that is out of range.
  14912 */
  14913static struct tail_call_test tail_call_tests[] = {
  14914	{
  14915		"Tail call leaf",
  14916		.insns = {
  14917			BPF_ALU64_REG(BPF_MOV, R0, R1),
  14918			BPF_ALU64_IMM(BPF_ADD, R0, 1),
  14919			BPF_EXIT_INSN(),
  14920		},
  14921		.result = 1,
  14922	},
  14923	{
  14924		"Tail call 2",
  14925		.insns = {
  14926			BPF_ALU64_IMM(BPF_ADD, R1, 2),
  14927			TAIL_CALL(-1),
  14928			BPF_ALU64_IMM(BPF_MOV, R0, -1),
  14929			BPF_EXIT_INSN(),
  14930		},
  14931		.result = 3,
  14932	},
  14933	{
  14934		"Tail call 3",
  14935		.insns = {
  14936			BPF_ALU64_IMM(BPF_ADD, R1, 3),
  14937			TAIL_CALL(-1),
  14938			BPF_ALU64_IMM(BPF_MOV, R0, -1),
  14939			BPF_EXIT_INSN(),
  14940		},
  14941		.result = 6,
  14942	},
  14943	{
  14944		"Tail call 4",
  14945		.insns = {
  14946			BPF_ALU64_IMM(BPF_ADD, R1, 4),
  14947			TAIL_CALL(-1),
  14948			BPF_ALU64_IMM(BPF_MOV, R0, -1),
  14949			BPF_EXIT_INSN(),
  14950		},
  14951		.result = 10,
  14952	},
  14953	{
  14954		"Tail call load/store leaf",
  14955		.insns = {
  14956			BPF_ALU64_IMM(BPF_MOV, R1, 1),
  14957			BPF_ALU64_IMM(BPF_MOV, R2, 2),
  14958			BPF_ALU64_REG(BPF_MOV, R3, BPF_REG_FP),
  14959			BPF_STX_MEM(BPF_DW, R3, R1, -8),
  14960			BPF_STX_MEM(BPF_DW, R3, R2, -16),
  14961			BPF_LDX_MEM(BPF_DW, R0, BPF_REG_FP, -8),
  14962			BPF_JMP_REG(BPF_JNE, R0, R1, 3),
  14963			BPF_LDX_MEM(BPF_DW, R0, BPF_REG_FP, -16),
  14964			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
  14965			BPF_ALU64_IMM(BPF_MOV, R0, 0),
  14966			BPF_EXIT_INSN(),
  14967		},
  14968		.result = 0,
  14969		.stack_depth = 32,
  14970	},
  14971	{
  14972		"Tail call load/store",
  14973		.insns = {
  14974			BPF_ALU64_IMM(BPF_MOV, R0, 3),
  14975			BPF_STX_MEM(BPF_DW, BPF_REG_FP, R0, -8),
  14976			TAIL_CALL(-1),
  14977			BPF_ALU64_IMM(BPF_MOV, R0, -1),
  14978			BPF_EXIT_INSN(),
  14979		},
  14980		.result = 0,
  14981		.stack_depth = 16,
  14982	},
  14983	{
  14984		"Tail call error path, max count reached",
  14985		.insns = {
  14986			BPF_LDX_MEM(BPF_W, R2, R1, 0),
  14987			BPF_ALU64_IMM(BPF_ADD, R2, 1),
  14988			BPF_STX_MEM(BPF_W, R1, R2, 0),
  14989			TAIL_CALL(0),
  14990			BPF_EXIT_INSN(),
  14991		},
  14992		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
  14993		.result = (MAX_TAIL_CALL_CNT + 1) * MAX_TESTRUNS,
  14994	},
  14995	{
  14996		"Tail call count preserved across function calls",
  14997		.insns = {
  14998			BPF_LDX_MEM(BPF_W, R2, R1, 0),
  14999			BPF_ALU64_IMM(BPF_ADD, R2, 1),
  15000			BPF_STX_MEM(BPF_W, R1, R2, 0),
  15001			BPF_STX_MEM(BPF_DW, R10, R1, -8),
  15002			BPF_CALL_REL(BPF_FUNC_get_numa_node_id),
  15003			BPF_CALL_REL(BPF_FUNC_ktime_get_ns),
  15004			BPF_CALL_REL(BPF_FUNC_ktime_get_boot_ns),
  15005			BPF_CALL_REL(BPF_FUNC_ktime_get_coarse_ns),
  15006			BPF_CALL_REL(BPF_FUNC_jiffies64),
  15007			BPF_CALL_REL(BPF_FUNC_test_func),
  15008			BPF_LDX_MEM(BPF_DW, R1, R10, -8),
  15009			BPF_ALU32_REG(BPF_MOV, R0, R1),
  15010			TAIL_CALL(0),
  15011			BPF_EXIT_INSN(),
  15012		},
  15013		.stack_depth = 8,
  15014		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
  15015		.result = (MAX_TAIL_CALL_CNT + 1) * MAX_TESTRUNS,
  15016	},
  15017	{
  15018		"Tail call error path, NULL target",
  15019		.insns = {
  15020			BPF_LDX_MEM(BPF_W, R2, R1, 0),
  15021			BPF_ALU64_IMM(BPF_ADD, R2, 1),
  15022			BPF_STX_MEM(BPF_W, R1, R2, 0),
  15023			TAIL_CALL(TAIL_CALL_NULL),
  15024			BPF_EXIT_INSN(),
  15025		},
  15026		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
  15027		.result = MAX_TESTRUNS,
  15028	},
  15029	{
  15030		"Tail call error path, index out of range",
  15031		.insns = {
  15032			BPF_LDX_MEM(BPF_W, R2, R1, 0),
  15033			BPF_ALU64_IMM(BPF_ADD, R2, 1),
  15034			BPF_STX_MEM(BPF_W, R1, R2, 0),
  15035			TAIL_CALL(TAIL_CALL_INVALID),
  15036			BPF_EXIT_INSN(),
  15037		},
  15038		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
  15039		.result = MAX_TESTRUNS,
  15040	},
  15041};
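       	/*
       	 * Worked example for the table above, assuming __run_one() passes the
       	 * (NULL) data pointer as the program context so R1 starts at 0 for
       	 * tests without FLAG_NEED_STATE: "Tail call 3" adds 3 to R1 and
       	 * tail-calls its neighbour "Tail call 2" (offset -1), which adds 2 and
       	 * tail-calls "Tail call leaf", which returns R1 + 1 = 6; R1 survives
       	 * the chain because a tail call is a jump rather than a function call.
       	 *
       	 * For the state-based error-path tests the context points at an int
       	 * that is incremented on every pass through the program body: the
       	 * self-calling test runs its body MAX_TAIL_CALL_CNT + 1 times per test
       	 * run, whereas the NULL and out-of-range targets fail immediately, so
       	 * the body runs exactly once per run.
       	 */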
  15042
  15043static void __init destroy_tail_call_tests(struct bpf_array *progs)
  15044{
  15045	int i;
  15046
  15047	for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++)
  15048		if (progs->ptrs[i])
  15049			bpf_prog_free(progs->ptrs[i]);
  15050	kfree(progs);
  15051}
  15052
  15053static __init int prepare_tail_call_tests(struct bpf_array **pprogs)
  15054{
  15055	int ntests = ARRAY_SIZE(tail_call_tests);
  15056	struct bpf_array *progs;
  15057	int which, err;
  15058
  15059	/* Allocate the table of programs to be used for tail calls */
  15060	progs = kzalloc(sizeof(*progs) + (ntests + 1) * sizeof(progs->ptrs[0]),
  15061			GFP_KERNEL);
  15062	if (!progs)
  15063		goto out_nomem;
  15064
  15065	/* Create all eBPF programs and populate the table */
  15066	for (which = 0; which < ntests; which++) {
  15067		struct tail_call_test *test = &tail_call_tests[which];
  15068		struct bpf_prog *fp;
  15069		int len, i;
  15070
  15071		/* Compute the number of program instructions (BPF_LD_IMM64 occupies two slots) */
  15072		for (len = 0; len < MAX_INSNS; len++) {
  15073			struct bpf_insn *insn = &test->insns[len];
  15074
  15075			if (len < MAX_INSNS - 1 &&
  15076			    insn->code == (BPF_LD | BPF_DW | BPF_IMM))
  15077				len++;
  15078			if (insn->code == 0)
  15079				break;
  15080		}
  15081
  15082		/* Allocate and initialize the program */
  15083		fp = bpf_prog_alloc(bpf_prog_size(len), 0);
  15084		if (!fp)
  15085			goto out_nomem;
  15086
  15087		fp->len = len;
  15088		fp->type = BPF_PROG_TYPE_SOCKET_FILTER;
  15089		fp->aux->stack_depth = test->stack_depth;
  15090		memcpy(fp->insnsi, test->insns, len * sizeof(struct bpf_insn));
  15091
  15092		/* Relocate runtime tail call offsets and addresses */
  15093		for (i = 0; i < len; i++) {
  15094			struct bpf_insn *insn = &fp->insnsi[i];
  15095			long addr = 0;
  15096
  15097			switch (insn->code) {
  15098			case BPF_LD | BPF_DW | BPF_IMM:
  15099				if (insn->imm != TAIL_CALL_MARKER)
  15100					break;
  15101				insn[0].imm = (u32)(long)progs;
  15102				insn[1].imm = ((u64)(long)progs) >> 32;
  15103				break;
  15104
  15105			case BPF_ALU | BPF_MOV | BPF_K:
  15106				if (insn->imm != TAIL_CALL_MARKER)
  15107					break;
  15108				if (insn->off == TAIL_CALL_NULL)
  15109					insn->imm = ntests;
  15110				else if (insn->off == TAIL_CALL_INVALID)
  15111					insn->imm = ntests + 1;
  15112				else
  15113					insn->imm = which + insn->off;
  15114				insn->off = 0;
  15115				break;
  15116
  15117			case BPF_JMP | BPF_CALL:
  15118				if (insn->src_reg != BPF_PSEUDO_CALL)
  15119					break;
  15120				switch (insn->imm) {
  15121				case BPF_FUNC_get_numa_node_id:
  15122					addr = (long)&numa_node_id;
  15123					break;
  15124				case BPF_FUNC_ktime_get_ns:
  15125					addr = (long)&ktime_get_ns;
  15126					break;
  15127				case BPF_FUNC_ktime_get_boot_ns:
  15128					addr = (long)&ktime_get_boot_fast_ns;
  15129					break;
  15130				case BPF_FUNC_ktime_get_coarse_ns:
  15131					addr = (long)&ktime_get_coarse_ns;
  15132					break;
  15133				case BPF_FUNC_jiffies64:
  15134					addr = (long)&get_jiffies_64;
  15135					break;
  15136				case BPF_FUNC_test_func:
  15137					addr = (long)&bpf_test_func;
  15138					break;
  15139				default:
  15140					err = -EFAULT;
  15141					goto out_err;
  15142				}
  15143				*insn = BPF_EMIT_CALL(addr);
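       				/*
       				 * BPF_EMIT_CALL() encodes the target as a
       				 * 32-bit offset from __bpf_call_base; if the
       				 * helper lies outside that range, fall back
       				 * to a NOP jump so the program still loads.
       				 */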
  15144				if ((long)__bpf_call_base + insn->imm != addr)
  15145					*insn = BPF_JMP_A(0); /* Skip: NOP */
  15146				break;
  15147			}
  15148		}
  15149
  15150		fp = bpf_prog_select_runtime(fp, &err);
  15151		if (err)
  15152			goto out_err;
  15153
  15154		progs->ptrs[which] = fp;
  15155	}
  15156
  15157	/* The last entry contains a NULL program pointer */
  15158	progs->map.max_entries = ntests + 1;
  15159	*pprogs = progs;
  15160	return 0;
  15161
  15162out_nomem:
  15163	err = -ENOMEM;
  15164
  15165out_err:
  15166	if (progs)
  15167		destroy_tail_call_tests(progs);
  15168	return err;
  15169}
  15170
  15171static __init int test_tail_calls(struct bpf_array *progs)
  15172{
  15173	int i, err_cnt = 0, pass_cnt = 0;
  15174	int jit_cnt = 0, run_cnt = 0;
  15175
  15176	for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++) {
  15177		struct tail_call_test *test = &tail_call_tests[i];
  15178		struct bpf_prog *fp = progs->ptrs[i];
  15179		int *data = NULL;
  15180		int state = 0;
  15181		u64 duration;
  15182		int ret;
  15183
  15184		cond_resched();
  15185		if (exclude_test(i))
  15186			continue;
  15187
  15188		pr_info("#%d %s ", i, test->descr);
  15189		if (!fp) {
  15190			err_cnt++;
  15191			continue;
  15192		}
  15193		pr_cont("jited:%u ", fp->jited);
  15194
  15195		run_cnt++;
  15196		if (fp->jited)
  15197			jit_cnt++;
  15198
  15199		if (test->flags & FLAG_NEED_STATE)
  15200			data = &state;
  15201		ret = __run_one(fp, data, MAX_TESTRUNS, &duration);
  15202		if (test->flags & FLAG_RESULT_IN_STATE)
  15203			ret = state;
  15204		if (ret == test->result) {
  15205			pr_cont("%llu PASS\n", duration);
  15206			pass_cnt++;
  15207		} else {
  15208			pr_cont("ret %d != %d FAIL\n", ret, test->result);
  15209			err_cnt++;
  15210		}
  15211	}
  15212
  15213	pr_info("%s: Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
  15214		__func__, pass_cnt, err_cnt, jit_cnt, run_cnt);
  15215
  15216	return err_cnt ? -EINVAL : 0;
  15217}
  15218
  15219static char test_suite[32];
  15220module_param_string(test_suite, test_suite, sizeof(test_suite), 0);
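       	/*
       	 * Usage sketch (test_id, test_name and test_range are module
       	 * parameters defined earlier in this file):
       	 *
       	 *   modprobe test_bpf test_suite=test_skb_segment
       	 *   modprobe test_bpf test_suite=test_tail_calls test_name="Tail call leaf"
       	 *   modprobe test_bpf test_id=7	(test_suite defaults to test_bpf)
       	 */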
  15221
  15222static __init int find_test_index(const char *test_name)
  15223{
  15224	int i;
  15225
  15226	if (!strcmp(test_suite, "test_bpf")) {
  15227		for (i = 0; i < ARRAY_SIZE(tests); i++) {
  15228			if (!strcmp(tests[i].descr, test_name))
  15229				return i;
  15230		}
  15231	}
  15232
  15233	if (!strcmp(test_suite, "test_tail_calls")) {
  15234		for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++) {
  15235			if (!strcmp(tail_call_tests[i].descr, test_name))
  15236				return i;
  15237		}
  15238	}
  15239
  15240	if (!strcmp(test_suite, "test_skb_segment")) {
  15241		for (i = 0; i < ARRAY_SIZE(skb_segment_tests); i++) {
  15242			if (!strcmp(skb_segment_tests[i].descr, test_name))
  15243				return i;
  15244		}
  15245	}
  15246
  15247	return -1;
  15248}
  15249
  15250static __init int prepare_test_range(void)
  15251{
  15252	int valid_range;
  15253
  15254	if (!strcmp(test_suite, "test_bpf"))
  15255		valid_range = ARRAY_SIZE(tests);
  15256	else if (!strcmp(test_suite, "test_tail_calls"))
  15257		valid_range = ARRAY_SIZE(tail_call_tests);
  15258	else if (!strcmp(test_suite, "test_skb_segment"))
  15259		valid_range = ARRAY_SIZE(skb_segment_tests);
  15260	else
  15261		return 0;
  15262
  15263	if (test_id >= 0) {
  15264		/*
  15265		 * if a test_id was specified, use test_range to
  15266		 * cover only that test.
  15267		 */
  15268		if (test_id >= valid_range) {
  15269			pr_err("test_bpf: invalid test_id specified for '%s' suite.\n",
  15270			       test_suite);
  15271			return -EINVAL;
  15272		}
  15273
  15274		test_range[0] = test_id;
  15275		test_range[1] = test_id;
  15276	} else if (*test_name) {
  15277		/*
  15278		 * if a test_name was specified, find it and setup
  15279		 * test_range to cover only that test.
  15280		 */
  15281		int idx = find_test_index(test_name);
  15282
  15283		if (idx < 0) {
  15284			pr_err("test_bpf: no test named '%s' found for '%s' suite.\n",
  15285			       test_name, test_suite);
  15286			return -EINVAL;
  15287		}
  15288		test_range[0] = idx;
  15289		test_range[1] = idx;
  15290	} else if (test_range[0] != 0 || test_range[1] != INT_MAX) {
  15291		/*
  15292		 * check that the supplied test_range is valid.
  15293		 */
  15294		if (test_range[0] < 0 || test_range[1] >= valid_range) {
  15295			pr_err("test_bpf: test_range is out of bounds for '%s' suite.\n",
  15296			       test_suite);
  15297			return -EINVAL;
  15298		}
  15299
  15300		if (test_range[1] < test_range[0]) {
  15301			pr_err("test_bpf: test_range ends before it starts.\n");
  15302			return -EINVAL;
  15303		}
  15304	}
  15305
  15306	return 0;
  15307}
  15308
  15309static int __init test_bpf_init(void)
  15310{
  15311	struct bpf_array *progs = NULL;
  15312	int ret;
  15313
  15314	if (strlen(test_suite) &&
  15315	    strcmp(test_suite, "test_bpf") &&
  15316	    strcmp(test_suite, "test_tail_calls") &&
  15317	    strcmp(test_suite, "test_skb_segment")) {
  15318		pr_err("test_bpf: invalid test_suite '%s' specified.\n", test_suite);
  15319		return -EINVAL;
  15320	}
  15321
  15322	/*
  15323	 * if test_suite is not specified, but test_id, test_name or test_range
  15324	 * is specified, set 'test_bpf' as the default test suite.
  15325	 */
  15326	if (!strlen(test_suite) &&
  15327	    (test_id != -1 || strlen(test_name) ||
  15328	    (test_range[0] != 0 || test_range[1] != INT_MAX))) {
  15329		pr_info("test_bpf: set 'test_bpf' as the default test_suite.\n");
  15330		strscpy(test_suite, "test_bpf", sizeof(test_suite));
  15331	}
  15332
  15333	ret = prepare_test_range();
  15334	if (ret < 0)
  15335		return ret;
  15336
  15337	if (!strlen(test_suite) || !strcmp(test_suite, "test_bpf")) {
  15338		ret = test_bpf();
  15339		if (ret)
  15340			return ret;
  15341	}
  15342
  15343	if (!strlen(test_suite) || !strcmp(test_suite, "test_tail_calls")) {
  15344		ret = prepare_tail_call_tests(&progs);
  15345		if (ret)
  15346			return ret;
  15347		ret = test_tail_calls(progs);
  15348		destroy_tail_call_tests(progs);
  15349		if (ret)
  15350			return ret;
  15351	}
  15352
  15353	if (!strlen(test_suite) || !strcmp(test_suite, "test_skb_segment"))
  15354		return test_skb_segment();
  15355
  15356	return 0;
  15357}
  15358
  15359static void __exit test_bpf_exit(void)
  15360{
  15361}
  15362
  15363module_init(test_bpf_init);
  15364module_exit(test_bpf_exit);
  15365
  15366MODULE_LICENSE("GPL");