Diffstat (limited to 'arch/arm/net')
-rw-r--r--  arch/arm/net/bpf_jit_32.c | 139
1 file changed, 67 insertions(+), 72 deletions(-)
diff --git a/arch/arm/net/bpf_jit_32.c b/arch/arm/net/bpf_jit_32.c
index 6f879c319a9d..fb5503ce016f 100644
--- a/arch/arm/net/bpf_jit_32.c
+++ b/arch/arm/net/bpf_jit_32.c
@@ -136,7 +136,7 @@ static u16 saved_regs(struct jit_ctx *ctx)
u16 ret = 0;
if ((ctx->skf->len > 1) ||
- (ctx->skf->insns[0].code == BPF_S_RET_A))
+ (ctx->skf->insns[0].code == (BPF_RET | BPF_A)))
ret |= 1 << r_A;
#ifdef CONFIG_FRAME_POINTER
@@ -164,18 +164,10 @@ static inline int mem_words_used(struct jit_ctx *ctx)
static inline bool is_load_to_a(u16 inst)
{
switch (inst) {
- case BPF_S_LD_W_LEN:
- case BPF_S_LD_W_ABS:
- case BPF_S_LD_H_ABS:
- case BPF_S_LD_B_ABS:
- case BPF_S_ANC_CPU:
- case BPF_S_ANC_IFINDEX:
- case BPF_S_ANC_MARK:
- case BPF_S_ANC_PROTOCOL:
- case BPF_S_ANC_RXHASH:
- case BPF_S_ANC_VLAN_TAG:
- case BPF_S_ANC_VLAN_TAG_PRESENT:
- case BPF_S_ANC_QUEUE:
+ case BPF_LD | BPF_W | BPF_LEN:
+ case BPF_LD | BPF_W | BPF_ABS:
+ case BPF_LD | BPF_H | BPF_ABS:
+ case BPF_LD | BPF_B | BPF_ABS:
return true;
default:
return false;
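
Note on the encoding used by the new case labels (not part of the patch, for orientation only): a classic BPF opcode is the bitwise OR of a class, size and mode field, so the removed BPF_S_LD_W_ABS enum value corresponds directly to BPF_LD | BPF_W | BPF_ABS. A minimal, illustrative userspace sketch of how the fields compose and decompose:

#include <stdio.h>
#include <linux/filter.h>	/* BPF_LD, BPF_W, BPF_ABS, BPF_CLASS(), BPF_SIZE(), BPF_MODE() */

int main(void)
{
	/* "load a 32-bit word from the packet at absolute offset K" */
	unsigned short code = BPF_LD | BPF_W | BPF_ABS;

	printf("code=0x%02x class=0x%02x size=0x%02x mode=0x%02x\n",
	       code, BPF_CLASS(code), BPF_SIZE(code), BPF_MODE(code));
	return 0;
}
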
@@ -215,7 +207,7 @@ static void build_prologue(struct jit_ctx *ctx)
emit(ARM_MOV_I(r_X, 0), ctx);
/* do not leak kernel data to userspace */
- if ((first_inst != BPF_S_RET_K) && !(is_load_to_a(first_inst)))
+ if ((first_inst != (BPF_RET | BPF_K)) && !(is_load_to_a(first_inst)))
emit(ARM_MOV_I(r_A, 0), ctx);
/* stack space for the BPF_MEM words */
@@ -480,36 +472,39 @@ static int build_body(struct jit_ctx *ctx)
u32 k;
for (i = 0; i < prog->len; i++) {
+ u16 code;
+
inst = &(prog->insns[i]);
/* K as an immediate value operand */
k = inst->k;
+ code = bpf_anc_helper(inst);
/* compute offsets only in the fake pass */
if (ctx->target == NULL)
ctx->offsets[i] = ctx->idx * 4;
- switch (inst->code) {
- case BPF_S_LD_IMM:
+ switch (code) {
+ case BPF_LD | BPF_IMM:
emit_mov_i(r_A, k, ctx);
break;
- case BPF_S_LD_W_LEN:
+ case BPF_LD | BPF_W | BPF_LEN:
ctx->seen |= SEEN_SKB;
BUILD_BUG_ON(FIELD_SIZEOF(struct sk_buff, len) != 4);
emit(ARM_LDR_I(r_A, r_skb,
offsetof(struct sk_buff, len)), ctx);
break;
- case BPF_S_LD_MEM:
+ case BPF_LD | BPF_MEM:
/* A = scratch[k] */
ctx->seen |= SEEN_MEM_WORD(k);
emit(ARM_LDR_I(r_A, ARM_SP, SCRATCH_OFF(k)), ctx);
break;
- case BPF_S_LD_W_ABS:
+ case BPF_LD | BPF_W | BPF_ABS:
load_order = 2;
goto load;
- case BPF_S_LD_H_ABS:
+ case BPF_LD | BPF_H | BPF_ABS:
load_order = 1;
goto load;
- case BPF_S_LD_B_ABS:
+ case BPF_LD | BPF_B | BPF_ABS:
load_order = 0;
load:
/* the interpreter will deal with the negative K */
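
For context: the switch is now keyed on bpf_anc_helper(inst) rather than inst->code. The helper is expected to fold ancillary packet loads (BPF_LD | BPF_{W,H,B} | BPF_ABS with k in the SKF_AD_OFF range) into BPF_ANC | SKF_AD_* and to return inst->code unchanged for everything else. A simplified sketch of that behaviour, not the actual helper from include/linux/filter.h:

/* Simplified sketch only; the real bpf_anc_helper() lives in
 * include/linux/filter.h and covers all SKF_AD_* fields. */
static u16 anc_helper_sketch(const struct sock_filter *inst)
{
	u16 code = inst->code;

	if (code == (BPF_LD | BPF_W | BPF_ABS) ||
	    code == (BPF_LD | BPF_H | BPF_ABS) ||
	    code == (BPF_LD | BPF_B | BPF_ABS)) {
		switch (inst->k) {
		case SKF_AD_OFF + SKF_AD_PROTOCOL:
			return BPF_ANC | SKF_AD_PROTOCOL;
		case SKF_AD_OFF + SKF_AD_VLAN_TAG:
			return BPF_ANC | SKF_AD_VLAN_TAG;
		/* ... remaining ancillary fields handled the same way ... */
		}
	}
	return code;
}
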
@@ -552,31 +547,31 @@ load_common:
emit_err_ret(ARM_COND_NE, ctx);
emit(ARM_MOV_R(r_A, ARM_R0), ctx);
break;
- case BPF_S_LD_W_IND:
+ case BPF_LD | BPF_W | BPF_IND:
load_order = 2;
goto load_ind;
- case BPF_S_LD_H_IND:
+ case BPF_LD | BPF_H | BPF_IND:
load_order = 1;
goto load_ind;
- case BPF_S_LD_B_IND:
+ case BPF_LD | BPF_B | BPF_IND:
load_order = 0;
load_ind:
OP_IMM3(ARM_ADD, r_off, r_X, k, ctx);
goto load_common;
- case BPF_S_LDX_IMM:
+ case BPF_LDX | BPF_IMM:
ctx->seen |= SEEN_X;
emit_mov_i(r_X, k, ctx);
break;
- case BPF_S_LDX_W_LEN:
+ case BPF_LDX | BPF_W | BPF_LEN:
ctx->seen |= SEEN_X | SEEN_SKB;
emit(ARM_LDR_I(r_X, r_skb,
offsetof(struct sk_buff, len)), ctx);
break;
- case BPF_S_LDX_MEM:
+ case BPF_LDX | BPF_MEM:
ctx->seen |= SEEN_X | SEEN_MEM_WORD(k);
emit(ARM_LDR_I(r_X, ARM_SP, SCRATCH_OFF(k)), ctx);
break;
- case BPF_S_LDX_B_MSH:
+ case BPF_LDX | BPF_B | BPF_MSH:
/* x = ((*(frame + k)) & 0xf) << 2; */
ctx->seen |= SEEN_X | SEEN_DATA | SEEN_CALL;
/* the interpreter should deal with the negative K */
@@ -606,113 +601,113 @@ load_ind:
emit(ARM_AND_I(r_X, ARM_R0, 0x00f), ctx);
emit(ARM_LSL_I(r_X, r_X, 2), ctx);
break;
- case BPF_S_ST:
+ case BPF_ST:
ctx->seen |= SEEN_MEM_WORD(k);
emit(ARM_STR_I(r_A, ARM_SP, SCRATCH_OFF(k)), ctx);
break;
- case BPF_S_STX:
+ case BPF_STX:
update_on_xread(ctx);
ctx->seen |= SEEN_MEM_WORD(k);
emit(ARM_STR_I(r_X, ARM_SP, SCRATCH_OFF(k)), ctx);
break;
- case BPF_S_ALU_ADD_K:
+ case BPF_ALU | BPF_ADD | BPF_K:
/* A += K */
OP_IMM3(ARM_ADD, r_A, r_A, k, ctx);
break;
- case BPF_S_ALU_ADD_X:
+ case BPF_ALU | BPF_ADD | BPF_X:
update_on_xread(ctx);
emit(ARM_ADD_R(r_A, r_A, r_X), ctx);
break;
- case BPF_S_ALU_SUB_K:
+ case BPF_ALU | BPF_SUB | BPF_K:
/* A -= K */
OP_IMM3(ARM_SUB, r_A, r_A, k, ctx);
break;
- case BPF_S_ALU_SUB_X:
+ case BPF_ALU | BPF_SUB | BPF_X:
update_on_xread(ctx);
emit(ARM_SUB_R(r_A, r_A, r_X), ctx);
break;
- case BPF_S_ALU_MUL_K:
+ case BPF_ALU | BPF_MUL | BPF_K:
/* A *= K */
emit_mov_i(r_scratch, k, ctx);
emit(ARM_MUL(r_A, r_A, r_scratch), ctx);
break;
- case BPF_S_ALU_MUL_X:
+ case BPF_ALU | BPF_MUL | BPF_X:
update_on_xread(ctx);
emit(ARM_MUL(r_A, r_A, r_X), ctx);
break;
- case BPF_S_ALU_DIV_K:
+ case BPF_ALU | BPF_DIV | BPF_K:
if (k == 1)
break;
emit_mov_i(r_scratch, k, ctx);
emit_udiv(r_A, r_A, r_scratch, ctx);
break;
- case BPF_S_ALU_DIV_X:
+ case BPF_ALU | BPF_DIV | BPF_X:
update_on_xread(ctx);
emit(ARM_CMP_I(r_X, 0), ctx);
emit_err_ret(ARM_COND_EQ, ctx);
emit_udiv(r_A, r_A, r_X, ctx);
break;
- case BPF_S_ALU_OR_K:
+ case BPF_ALU | BPF_OR | BPF_K:
/* A |= K */
OP_IMM3(ARM_ORR, r_A, r_A, k, ctx);
break;
- case BPF_S_ALU_OR_X:
+ case BPF_ALU | BPF_OR | BPF_X:
update_on_xread(ctx);
emit(ARM_ORR_R(r_A, r_A, r_X), ctx);
break;
- case BPF_S_ALU_XOR_K:
+ case BPF_ALU | BPF_XOR | BPF_K:
/* A ^= K; */
OP_IMM3(ARM_EOR, r_A, r_A, k, ctx);
break;
- case BPF_S_ANC_ALU_XOR_X:
- case BPF_S_ALU_XOR_X:
+ case BPF_ANC | SKF_AD_ALU_XOR_X:
+ case BPF_ALU | BPF_XOR | BPF_X:
/* A ^= X */
update_on_xread(ctx);
emit(ARM_EOR_R(r_A, r_A, r_X), ctx);
break;
- case BPF_S_ALU_AND_K:
+ case BPF_ALU | BPF_AND | BPF_K:
/* A &= K */
OP_IMM3(ARM_AND, r_A, r_A, k, ctx);
break;
- case BPF_S_ALU_AND_X:
+ case BPF_ALU | BPF_AND | BPF_X:
update_on_xread(ctx);
emit(ARM_AND_R(r_A, r_A, r_X), ctx);
break;
- case BPF_S_ALU_LSH_K:
+ case BPF_ALU | BPF_LSH | BPF_K:
if (unlikely(k > 31))
return -1;
emit(ARM_LSL_I(r_A, r_A, k), ctx);
break;
- case BPF_S_ALU_LSH_X:
+ case BPF_ALU | BPF_LSH | BPF_X:
update_on_xread(ctx);
emit(ARM_LSL_R(r_A, r_A, r_X), ctx);
break;
- case BPF_S_ALU_RSH_K:
+ case BPF_ALU | BPF_RSH | BPF_K:
if (unlikely(k > 31))
return -1;
emit(ARM_LSR_I(r_A, r_A, k), ctx);
break;
- case BPF_S_ALU_RSH_X:
+ case BPF_ALU | BPF_RSH | BPF_X:
update_on_xread(ctx);
emit(ARM_LSR_R(r_A, r_A, r_X), ctx);
break;
- case BPF_S_ALU_NEG:
+ case BPF_ALU | BPF_NEG:
/* A = -A */
emit(ARM_RSB_I(r_A, r_A, 0), ctx);
break;
- case BPF_S_JMP_JA:
+ case BPF_JMP | BPF_JA:
/* pc += K */
emit(ARM_B(b_imm(i + k + 1, ctx)), ctx);
break;
- case BPF_S_JMP_JEQ_K:
+ case BPF_JMP | BPF_JEQ | BPF_K:
/* pc += (A == K) ? pc->jt : pc->jf */
condt = ARM_COND_EQ;
goto cmp_imm;
- case BPF_S_JMP_JGT_K:
+ case BPF_JMP | BPF_JGT | BPF_K:
/* pc += (A > K) ? pc->jt : pc->jf */
condt = ARM_COND_HI;
goto cmp_imm;
- case BPF_S_JMP_JGE_K:
+ case BPF_JMP | BPF_JGE | BPF_K:
/* pc += (A >= K) ? pc->jt : pc->jf */
condt = ARM_COND_HS;
cmp_imm:
@@ -731,22 +726,22 @@ cond_jump:
_emit(condt ^ 1, ARM_B(b_imm(i + inst->jf + 1,
ctx)), ctx);
break;
- case BPF_S_JMP_JEQ_X:
+ case BPF_JMP | BPF_JEQ | BPF_X:
/* pc += (A == X) ? pc->jt : pc->jf */
condt = ARM_COND_EQ;
goto cmp_x;
- case BPF_S_JMP_JGT_X:
+ case BPF_JMP | BPF_JGT | BPF_X:
/* pc += (A > X) ? pc->jt : pc->jf */
condt = ARM_COND_HI;
goto cmp_x;
- case BPF_S_JMP_JGE_X:
+ case BPF_JMP | BPF_JGE | BPF_X:
/* pc += (A >= X) ? pc->jt : pc->jf */
condt = ARM_COND_CS;
cmp_x:
update_on_xread(ctx);
emit(ARM_CMP_R(r_A, r_X), ctx);
goto cond_jump;
- case BPF_S_JMP_JSET_K:
+ case BPF_JMP | BPF_JSET | BPF_K:
/* pc += (A & K) ? pc->jt : pc->jf */
condt = ARM_COND_NE;
/* not set iff all zeroes iff Z==1 iff EQ */
@@ -759,16 +754,16 @@ cmp_x:
emit(ARM_TST_I(r_A, imm12), ctx);
}
goto cond_jump;
- case BPF_S_JMP_JSET_X:
+ case BPF_JMP | BPF_JSET | BPF_X:
/* pc += (A & X) ? pc->jt : pc->jf */
update_on_xread(ctx);
condt = ARM_COND_NE;
emit(ARM_TST_R(r_A, r_X), ctx);
goto cond_jump;
- case BPF_S_RET_A:
+ case BPF_RET | BPF_A:
emit(ARM_MOV_R(ARM_R0, r_A), ctx);
goto b_epilogue;
- case BPF_S_RET_K:
+ case BPF_RET | BPF_K:
if ((k == 0) && (ctx->ret0_fp_idx < 0))
ctx->ret0_fp_idx = i;
emit_mov_i(ARM_R0, k, ctx);
@@ -776,17 +771,17 @@ b_epilogue:
if (i != ctx->skf->len - 1)
emit(ARM_B(b_imm(prog->len, ctx)), ctx);
break;
- case BPF_S_MISC_TAX:
+ case BPF_MISC | BPF_TAX:
/* X = A */
ctx->seen |= SEEN_X;
emit(ARM_MOV_R(r_X, r_A), ctx);
break;
- case BPF_S_MISC_TXA:
+ case BPF_MISC | BPF_TXA:
/* A = X */
update_on_xread(ctx);
emit(ARM_MOV_R(r_A, r_X), ctx);
break;
- case BPF_S_ANC_PROTOCOL:
+ case BPF_ANC | SKF_AD_PROTOCOL:
/* A = ntohs(skb->protocol) */
ctx->seen |= SEEN_SKB;
BUILD_BUG_ON(FIELD_SIZEOF(struct sk_buff,
@@ -795,7 +790,7 @@ b_epilogue:
emit(ARM_LDRH_I(r_scratch, r_skb, off), ctx);
emit_swap16(r_A, r_scratch, ctx);
break;
- case BPF_S_ANC_CPU:
+ case BPF_ANC | SKF_AD_CPU:
/* r_scratch = current_thread_info() */
OP_IMM3(ARM_BIC, r_scratch, ARM_SP, THREAD_SIZE - 1, ctx);
/* A = current_thread_info()->cpu */
@@ -803,7 +798,7 @@ b_epilogue:
off = offsetof(struct thread_info, cpu);
emit(ARM_LDR_I(r_A, r_scratch, off), ctx);
break;
- case BPF_S_ANC_IFINDEX:
+ case BPF_ANC | SKF_AD_IFINDEX:
/* A = skb->dev->ifindex */
ctx->seen |= SEEN_SKB;
off = offsetof(struct sk_buff, dev);
@@ -817,30 +812,30 @@ b_epilogue:
off = offsetof(struct net_device, ifindex);
emit(ARM_LDR_I(r_A, r_scratch, off), ctx);
break;
- case BPF_S_ANC_MARK:
+ case BPF_ANC | SKF_AD_MARK:
ctx->seen |= SEEN_SKB;
BUILD_BUG_ON(FIELD_SIZEOF(struct sk_buff, mark) != 4);
off = offsetof(struct sk_buff, mark);
emit(ARM_LDR_I(r_A, r_skb, off), ctx);
break;
- case BPF_S_ANC_RXHASH:
+ case BPF_ANC | SKF_AD_RXHASH:
ctx->seen |= SEEN_SKB;
BUILD_BUG_ON(FIELD_SIZEOF(struct sk_buff, hash) != 4);
off = offsetof(struct sk_buff, hash);
emit(ARM_LDR_I(r_A, r_skb, off), ctx);
break;
- case BPF_S_ANC_VLAN_TAG:
- case BPF_S_ANC_VLAN_TAG_PRESENT:
+ case BPF_ANC | SKF_AD_VLAN_TAG:
+ case BPF_ANC | SKF_AD_VLAN_TAG_PRESENT:
ctx->seen |= SEEN_SKB;
BUILD_BUG_ON(FIELD_SIZEOF(struct sk_buff, vlan_tci) != 2);
off = offsetof(struct sk_buff, vlan_tci);
emit(ARM_LDRH_I(r_A, r_skb, off), ctx);
- if (inst->code == BPF_S_ANC_VLAN_TAG)
+ if (code == (BPF_ANC | SKF_AD_VLAN_TAG))
OP_IMM3(ARM_AND, r_A, r_A, VLAN_VID_MASK, ctx);
else
OP_IMM3(ARM_AND, r_A, r_A, VLAN_TAG_PRESENT, ctx);
break;
- case BPF_S_ANC_QUEUE:
+ case BPF_ANC | SKF_AD_QUEUE:
ctx->seen |= SEEN_SKB;
BUILD_BUG_ON(FIELD_SIZEOF(struct sk_buff,
queue_mapping) != 2);
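
The BPF_ANC | SKF_AD_* cases above are what classic BPF's ancillary loads become after bpf_anc_helper(): a filter expresses them as an absolute load at a magic negative offset. A small illustrative userspace filter (an assumed usage example, not taken from the patch) that exercises the VLAN tag path handled above:

#include <linux/filter.h>	/* struct sock_filter, BPF_STMT, BPF_JUMP, SKF_AD_* */

/* Illustrative: drop packets whose vlan_tci is zero, accept the rest. */
static struct sock_filter vlan_prog[] = {
	/* A = skb->vlan_tci -- ancillary load: BPF_LD|BPF_W|BPF_ABS with
	 * K = SKF_AD_OFF + SKF_AD_VLAN_TAG, folded by bpf_anc_helper()
	 * into BPF_ANC | SKF_AD_VLAN_TAG for the JIT */
	BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF + SKF_AD_VLAN_TAG),
	BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0, 0, 1),	/* tci == 0 ? */
	BPF_STMT(BPF_RET | BPF_K, 0),			/* yes: drop */
	BPF_STMT(BPF_RET | BPF_K, 0xffff),		/* no: accept */
};

static struct sock_fprog vlan_fprog = {
	.len	= sizeof(vlan_prog) / sizeof(vlan_prog[0]),
	.filter	= vlan_prog,
};

Such a program would typically be attached to a packet socket with setsockopt(fd, SOL_SOCKET, SO_ATTACH_FILTER, &vlan_fprog, sizeof(vlan_fprog)), at which point the JIT above may compile it to native ARM code.
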