diff options
Diffstat (limited to 'lib/test_bpf.c')
| -rw-r--r-- | lib/test_bpf.c | 120 | 
1 file changed, 120 insertions, 0 deletions
| diff --git a/lib/test_bpf.c b/lib/test_bpf.c index 10cd1860e5b0..27a7a26b1ece 100644 --- a/lib/test_bpf.c +++ b/lib/test_bpf.c @@ -1685,6 +1685,126 @@ static struct bpf_test tests[] = {  		{ },  		{ { 0, 0x35d97ef2 } }  	}, +	{	/* Mainly checking JIT here. */ +		"MOV REG64", +		.u.insns_int = { +			BPF_LD_IMM64(R0, 0xffffffffffffffffLL), +			BPF_MOV64_REG(R1, R0), +			BPF_MOV64_REG(R2, R1), +			BPF_MOV64_REG(R3, R2), +			BPF_MOV64_REG(R4, R3), +			BPF_MOV64_REG(R5, R4), +			BPF_MOV64_REG(R6, R5), +			BPF_MOV64_REG(R7, R6), +			BPF_MOV64_REG(R8, R7), +			BPF_MOV64_REG(R9, R8), +			BPF_ALU64_IMM(BPF_MOV, R0, 0), +			BPF_ALU64_IMM(BPF_MOV, R1, 0), +			BPF_ALU64_IMM(BPF_MOV, R2, 0), +			BPF_ALU64_IMM(BPF_MOV, R3, 0), +			BPF_ALU64_IMM(BPF_MOV, R4, 0), +			BPF_ALU64_IMM(BPF_MOV, R5, 0), +			BPF_ALU64_IMM(BPF_MOV, R6, 0), +			BPF_ALU64_IMM(BPF_MOV, R7, 0), +			BPF_ALU64_IMM(BPF_MOV, R8, 0), +			BPF_ALU64_IMM(BPF_MOV, R9, 0), +			BPF_ALU64_REG(BPF_ADD, R0, R0), +			BPF_ALU64_REG(BPF_ADD, R0, R1), +			BPF_ALU64_REG(BPF_ADD, R0, R2), +			BPF_ALU64_REG(BPF_ADD, R0, R3), +			BPF_ALU64_REG(BPF_ADD, R0, R4), +			BPF_ALU64_REG(BPF_ADD, R0, R5), +			BPF_ALU64_REG(BPF_ADD, R0, R6), +			BPF_ALU64_REG(BPF_ADD, R0, R7), +			BPF_ALU64_REG(BPF_ADD, R0, R8), +			BPF_ALU64_REG(BPF_ADD, R0, R9), +			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe), +			BPF_EXIT_INSN(), +		}, +		INTERNAL, +		{ }, +		{ { 0, 0xfefe } } +	}, +	{	/* Mainly checking JIT here. 
*/ +		"MOV REG32", +		.u.insns_int = { +			BPF_LD_IMM64(R0, 0xffffffffffffffffLL), +			BPF_MOV64_REG(R1, R0), +			BPF_MOV64_REG(R2, R1), +			BPF_MOV64_REG(R3, R2), +			BPF_MOV64_REG(R4, R3), +			BPF_MOV64_REG(R5, R4), +			BPF_MOV64_REG(R6, R5), +			BPF_MOV64_REG(R7, R6), +			BPF_MOV64_REG(R8, R7), +			BPF_MOV64_REG(R9, R8), +			BPF_ALU32_IMM(BPF_MOV, R0, 0), +			BPF_ALU32_IMM(BPF_MOV, R1, 0), +			BPF_ALU32_IMM(BPF_MOV, R2, 0), +			BPF_ALU32_IMM(BPF_MOV, R3, 0), +			BPF_ALU32_IMM(BPF_MOV, R4, 0), +			BPF_ALU32_IMM(BPF_MOV, R5, 0), +			BPF_ALU32_IMM(BPF_MOV, R6, 0), +			BPF_ALU32_IMM(BPF_MOV, R7, 0), +			BPF_ALU32_IMM(BPF_MOV, R8, 0), +			BPF_ALU32_IMM(BPF_MOV, R9, 0), +			BPF_ALU64_REG(BPF_ADD, R0, R0), +			BPF_ALU64_REG(BPF_ADD, R0, R1), +			BPF_ALU64_REG(BPF_ADD, R0, R2), +			BPF_ALU64_REG(BPF_ADD, R0, R3), +			BPF_ALU64_REG(BPF_ADD, R0, R4), +			BPF_ALU64_REG(BPF_ADD, R0, R5), +			BPF_ALU64_REG(BPF_ADD, R0, R6), +			BPF_ALU64_REG(BPF_ADD, R0, R7), +			BPF_ALU64_REG(BPF_ADD, R0, R8), +			BPF_ALU64_REG(BPF_ADD, R0, R9), +			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe), +			BPF_EXIT_INSN(), +		}, +		INTERNAL, +		{ }, +		{ { 0, 0xfefe } } +	}, +	{	/* Mainly checking JIT here. 
*/ +		"LD IMM64", +		.u.insns_int = { +			BPF_LD_IMM64(R0, 0xffffffffffffffffLL), +			BPF_MOV64_REG(R1, R0), +			BPF_MOV64_REG(R2, R1), +			BPF_MOV64_REG(R3, R2), +			BPF_MOV64_REG(R4, R3), +			BPF_MOV64_REG(R5, R4), +			BPF_MOV64_REG(R6, R5), +			BPF_MOV64_REG(R7, R6), +			BPF_MOV64_REG(R8, R7), +			BPF_MOV64_REG(R9, R8), +			BPF_LD_IMM64(R0, 0x0LL), +			BPF_LD_IMM64(R1, 0x0LL), +			BPF_LD_IMM64(R2, 0x0LL), +			BPF_LD_IMM64(R3, 0x0LL), +			BPF_LD_IMM64(R4, 0x0LL), +			BPF_LD_IMM64(R5, 0x0LL), +			BPF_LD_IMM64(R6, 0x0LL), +			BPF_LD_IMM64(R7, 0x0LL), +			BPF_LD_IMM64(R8, 0x0LL), +			BPF_LD_IMM64(R9, 0x0LL), +			BPF_ALU64_REG(BPF_ADD, R0, R0), +			BPF_ALU64_REG(BPF_ADD, R0, R1), +			BPF_ALU64_REG(BPF_ADD, R0, R2), +			BPF_ALU64_REG(BPF_ADD, R0, R3), +			BPF_ALU64_REG(BPF_ADD, R0, R4), +			BPF_ALU64_REG(BPF_ADD, R0, R5), +			BPF_ALU64_REG(BPF_ADD, R0, R6), +			BPF_ALU64_REG(BPF_ADD, R0, R7), +			BPF_ALU64_REG(BPF_ADD, R0, R8), +			BPF_ALU64_REG(BPF_ADD, R0, R9), +			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe), +			BPF_EXIT_INSN(), +		}, +		INTERNAL, +		{ }, +		{ { 0, 0xfefe } } +	},  	{  		"INT: ALU MIX",  		.u.insns_int = { | 
