 src/plugins/nat/in2out_ed.c      |   3
 src/plugins/nat/nat.c            |  24
 src/plugins/nat/nat.h            |   5
 src/plugins/nat/nat44_classify.c | 148
 src/plugins/nat/nat44_handoff.c  |  17
 src/plugins/nat/nat_inlines.h    |   5
 src/plugins/nat/out2in_ed.c      |   4
 src/vnet/feature/registration.c  |   2
 8 files changed, 160 insertions(+), 48 deletions(-)
diff --git a/src/plugins/nat/in2out_ed.c b/src/plugins/nat/in2out_ed.c
index f96f6367781..7ad66553fe9 100644
--- a/src/plugins/nat/in2out_ed.c
+++ b/src/plugins/nat/in2out_ed.c
@@ -1591,7 +1591,8 @@ format_nat_pre_trace (u8 * s, va_list * args)
CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
nat_pre_trace_t *t = va_arg (*args, nat_pre_trace_t *);
- return format (s, "in2out next_index %d", t->next_index);
+ return format (s, "in2out next_index %d arc_next_index %d", t->next_index,
+ t->arc_next_index);
}
VLIB_NODE_FN (nat_pre_in2out_node)
diff --git a/src/plugins/nat/nat.c b/src/plugins/nat/nat.c
index cc0789d1194..125a969313c 100755
--- a/src/plugins/nat/nat.c
+++ b/src/plugins/nat/nat.c
@@ -1960,11 +1960,11 @@ snat_interface_add_del (u32 sw_if_index, u8 is_inside, int is_del)
if (sm->fq_in2out_index == ~0 && !sm->deterministic && sm->num_workers > 1)
sm->fq_in2out_index =
- vlib_frame_queue_main_init (sm->handoff_in2out_index, NAT_FQ_NELTS);
+ vlib_frame_queue_main_init (sm->in2out_node_index, NAT_FQ_NELTS);
if (sm->fq_out2in_index == ~0 && !sm->deterministic && sm->num_workers > 1)
sm->fq_out2in_index =
- vlib_frame_queue_main_init (sm->handoff_out2in_index, NAT_FQ_NELTS);
+ vlib_frame_queue_main_init (sm->out2in_node_index, NAT_FQ_NELTS);
if (!is_inside)
{
@@ -2326,11 +2326,11 @@ feature_set:
fq:
if (sm->fq_in2out_output_index == ~0 && sm->num_workers > 1)
sm->fq_in2out_output_index =
- vlib_frame_queue_main_init (sm->handoff_in2out_output_index, 0);
+ vlib_frame_queue_main_init (sm->in2out_output_node_index, 0);
if (sm->fq_out2in_index == ~0 && sm->num_workers > 1)
sm->fq_out2in_index =
- vlib_frame_queue_main_init (sm->handoff_out2in_index, 0);
+ vlib_frame_queue_main_init (sm->out2in_node_index, 0);
/* *INDENT-OFF* */
pool_foreach (i, sm->output_feature_interfaces,
@@ -3994,7 +3994,6 @@ snat_config (vlib_main_t * vm, unformat_input_t * input)
{
snat_main_t *sm = &snat_main;
nat66_main_t *nm = &nat66_main;
- //dslite_main_t *dm = &dslite_main;
snat_main_per_thread_data_t *tsm;
u32 static_mapping_buckets = 1024;
@@ -4151,10 +4150,6 @@ snat_config (vlib_main_t * vm, unformat_input_t * input)
sm->worker_in2out_cb = nat44_ed_get_worker_in2out_cb;
sm->worker_out2in_cb = nat44_ed_get_worker_out2in_cb;
- sm->handoff_out2in_index = nat_pre_out2in_node.index;
- sm->handoff_in2out_index = nat_pre_in2out_node.index;
- sm->handoff_in2out_output_index = nat44_ed_in2out_output_node.index;
-
sm->in2out_node_index = nat44_ed_in2out_node.index;
sm->in2out_output_node_index = nat44_ed_in2out_output_node.index;
sm->out2in_node_index = nat44_ed_out2in_node.index;
@@ -4170,13 +4165,10 @@ snat_config (vlib_main_t * vm, unformat_input_t * input)
sm->worker_in2out_cb = snat_get_worker_in2out_cb;
sm->worker_out2in_cb = snat_get_worker_out2in_cb;
- sm->handoff_out2in_index = snat_out2in_node.index;
- sm->handoff_in2out_index = snat_in2out_node.index;
- sm->handoff_in2out_output_index = snat_in2out_output_node.index;
-
sm->in2out_node_index = snat_in2out_node.index;
sm->in2out_output_node_index = snat_in2out_output_node.index;
sm->out2in_node_index = snat_out2in_node.index;
+
sm->icmp_match_in2out_cb = icmp_match_in2out_slow;
sm->icmp_match_out2in_cb = icmp_match_out2in_slow;
nat_ha_init (vm, nat_ha_sadd_cb, nat_ha_sdel_cb, nat_ha_sref_cb);
@@ -4565,13 +4557,15 @@ VLIB_REGISTER_NODE (nat_default_node) = {
.next_nodes = {
[NAT_NEXT_DROP] = "error-drop",
[NAT_NEXT_ICMP_ERROR] = "ip4-icmp-error",
- [NAT_NEXT_IN2OUT_PRE] = "nat-pre-in2out",
- [NAT_NEXT_OUT2IN_PRE] = "nat-pre-out2in",
+ //[NAT_NEXT_IN2OUT_PRE] = "nat-pre-in2out",
+ //[NAT_NEXT_OUT2IN_PRE] = "nat-pre-out2in",
[NAT_NEXT_IN2OUT_ED_FAST_PATH] = "nat44-ed-in2out",
[NAT_NEXT_IN2OUT_ED_SLOW_PATH] = "nat44-ed-in2out-slowpath",
[NAT_NEXT_IN2OUT_ED_OUTPUT_SLOW_PATH] = "nat44-ed-in2out-output-slowpath",
[NAT_NEXT_OUT2IN_ED_FAST_PATH] = "nat44-ed-out2in",
[NAT_NEXT_OUT2IN_ED_SLOW_PATH] = "nat44-ed-out2in-slowpath",
+ [NAT_NEXT_IN2OUT_CLASSIFY] = "nat44-in2out-worker-handoff",
+ [NAT_NEXT_OUT2IN_CLASSIFY] = "nat44-out2in-worker-handoff",
},
};
/* *INDENT-ON* */
diff --git a/src/plugins/nat/nat.h b/src/plugins/nat/nat.h
index 4a6631c463f..33af754fc85 100644
--- a/src/plugins/nat/nat.h
+++ b/src/plugins/nat/nat.h
@@ -50,19 +50,20 @@ typedef enum
{
NAT_NEXT_DROP,
NAT_NEXT_ICMP_ERROR,
- NAT_NEXT_IN2OUT_PRE,
- NAT_NEXT_OUT2IN_PRE,
NAT_NEXT_IN2OUT_ED_FAST_PATH,
NAT_NEXT_IN2OUT_ED_SLOW_PATH,
NAT_NEXT_IN2OUT_ED_OUTPUT_SLOW_PATH,
NAT_NEXT_OUT2IN_ED_FAST_PATH,
NAT_NEXT_OUT2IN_ED_SLOW_PATH,
+ NAT_NEXT_IN2OUT_CLASSIFY,
+ NAT_NEXT_OUT2IN_CLASSIFY,
NAT_N_NEXT,
} nat_next_t;
typedef struct
{
u32 next_index;
+ u32 arc_next_index;
} nat_pre_trace_t;
/* session key (4-tuple) */
diff --git a/src/plugins/nat/nat44_classify.c b/src/plugins/nat/nat44_classify.c
index a57f24177ab..b4645dc42b9 100644
--- a/src/plugins/nat/nat44_classify.c
+++ b/src/plugins/nat/nat44_classify.c
@@ -85,7 +85,7 @@ nat44_classify_node_fn_inline (vlib_main_t * vm,
snat_main_t *sm = &snat_main;
snat_static_mapping_t *m;
u32 *fragments_to_drop = 0;
- u32 next_in2out = 0, next_out2in = 0, frag_cached = 0;
+ u32 next_in2out = 0, next_out2in = 0;
from = vlib_frame_vector_args (frame);
n_left_from = frame->n_vectors;
@@ -189,9 +189,122 @@ nat44_classify_node_fn_inline (vlib_main_t * vm,
NAT44_CLASSIFY_ERROR_NEXT_IN2OUT, next_in2out);
vlib_node_increment_counter (vm, node->node_index,
NAT44_CLASSIFY_ERROR_NEXT_OUT2IN, next_out2in);
- vlib_node_increment_counter (vm, node->node_index,
- NAT44_CLASSIFY_ERROR_FRAG_CACHED, frag_cached);
+ return frame->n_vectors;
+}
+
+static inline uword
+nat44_handoff_classify_node_fn_inline (vlib_main_t * vm,
+ vlib_node_runtime_t * node,
+ vlib_frame_t * frame)
+{
+ u32 n_left_from, *from, *to_next;
+ nat44_classify_next_t next_index;
+ snat_main_t *sm = &snat_main;
+ snat_static_mapping_t *m;
+ u32 *fragments_to_drop = 0;
+ u32 next_in2out = 0, next_out2in = 0;
+
+ from = vlib_frame_vector_args (frame);
+ n_left_from = frame->n_vectors;
+ next_index = node->cached_next_index;
+
+ while (n_left_from > 0)
+ {
+ u32 n_left_to_next;
+
+ vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
+ while (n_left_from > 0 && n_left_to_next > 0)
+ {
+ u32 bi0;
+ vlib_buffer_t *b0;
+ u32 next0 = NAT_NEXT_IN2OUT_CLASSIFY;
+ ip4_header_t *ip0;
+ snat_address_t *ap;
+ snat_session_key_t m_key0;
+ clib_bihash_kv_8_8_t kv0, value0;
+
+ /* speculatively enqueue b0 to the current next frame */
+ bi0 = from[0];
+ to_next[0] = bi0;
+ from += 1;
+ to_next += 1;
+ n_left_from -= 1;
+ n_left_to_next -= 1;
+
+ b0 = vlib_get_buffer (vm, bi0);
+ ip0 = vlib_buffer_get_current (b0);
+
+ /* *INDENT-OFF* */
+ vec_foreach (ap, sm->addresses)
+ {
+ if (ip0->dst_address.as_u32 == ap->addr.as_u32)
+ {
+ next0 = NAT_NEXT_OUT2IN_CLASSIFY;
+ goto enqueue0;
+ }
+ }
+ /* *INDENT-ON* */
+
+ if (PREDICT_FALSE (pool_elts (sm->static_mappings)))
+ {
+ m_key0.addr = ip0->dst_address;
+ m_key0.port = 0;
+ m_key0.protocol = 0;
+ m_key0.fib_index = 0;
+ kv0.key = m_key0.as_u64;
+ /* try to classify the fragment based on IP header alone */
+ if (!clib_bihash_search_8_8 (&sm->static_mapping_by_external,
+ &kv0, &value0))
+ {
+ m = pool_elt_at_index (sm->static_mappings, value0.value);
+ if (m->local_addr.as_u32 != m->external_addr.as_u32)
+ next0 = NAT_NEXT_OUT2IN_CLASSIFY;
+ goto enqueue0;
+ }
+ m_key0.port =
+ clib_net_to_host_u16 (vnet_buffer (b0)->ip.reass.l4_dst_port);
+ m_key0.protocol = ip_proto_to_snat_proto (ip0->protocol);
+ kv0.key = m_key0.as_u64;
+ if (!clib_bihash_search_8_8
+ (&sm->static_mapping_by_external, &kv0, &value0))
+ {
+ m = pool_elt_at_index (sm->static_mappings, value0.value);
+ if (m->local_addr.as_u32 != m->external_addr.as_u32)
+ next0 = NAT_NEXT_OUT2IN_CLASSIFY;
+ }
+ }
+
+ enqueue0:
+ if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
+ && (b0->flags & VLIB_BUFFER_IS_TRACED)))
+ {
+ nat44_classify_trace_t *t =
+ vlib_add_trace (vm, node, b0, sizeof (*t));
+ t->cached = 0;
+ t->next_in2out = next0 == NAT_NEXT_IN2OUT_CLASSIFY ? 1 : 0;
+ }
+
+ next_in2out += next0 == NAT_NEXT_IN2OUT_CLASSIFY;
+ next_out2in += next0 == NAT_NEXT_OUT2IN_CLASSIFY;
+
+ /* verify speculative enqueue, maybe switch current next frame */
+ vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
+ to_next, n_left_to_next,
+ bi0, next0);
+ }
+
+ vlib_put_next_frame (vm, node, next_index, n_left_to_next);
+ }
+
+ nat_send_all_to_node (vm, fragments_to_drop, node, 0, NAT_NEXT_DROP);
+
+ vec_free (fragments_to_drop);
+
+ vlib_node_increment_counter (vm, node->node_index,
+ NAT44_CLASSIFY_ERROR_NEXT_IN2OUT, next_in2out);
+ vlib_node_increment_counter (vm, node->node_index,
+ NAT44_CLASSIFY_ERROR_NEXT_OUT2IN, next_out2in);
return frame->n_vectors;
}
@@ -207,8 +320,7 @@ nat44_ed_classify_node_fn_inline (vlib_main_t * vm,
u32 thread_index = vm->thread_index;
snat_main_per_thread_data_t *tsm = &sm->per_thread_data[thread_index];
u32 *fragments_to_drop = 0;
- u32 next_in2out = 0, next_out2in = 0, frag_cached = 0;
- u8 in_loopback = 0;
+ u32 next_in2out = 0, next_out2in = 0;
from = vlib_frame_vector_args (frame);
n_left_from = frame->n_vectors;
@@ -224,8 +336,8 @@ nat44_ed_classify_node_fn_inline (vlib_main_t * vm,
{
u32 bi0;
vlib_buffer_t *b0;
- u32 next0 =
- NAT_NEXT_IN2OUT_ED_FAST_PATH, sw_if_index0, rx_fib_index0;
+ u32 next0 = NAT_NEXT_IN2OUT_ED_FAST_PATH;
+ u32 sw_if_index0, rx_fib_index0;
ip4_header_t *ip0;
snat_address_t *ap;
snat_session_key_t m_key0;
@@ -243,13 +355,9 @@ nat44_ed_classify_node_fn_inline (vlib_main_t * vm,
b0 = vlib_get_buffer (vm, bi0);
ip0 = vlib_buffer_get_current (b0);
- if (!in_loopback)
- {
- u32 arc_next = 0;
-
- vnet_feature_next (&arc_next, b0);
- vnet_buffer2 (b0)->nat.arc_next = arc_next;
- }
+ u32 arc_next;
+ vnet_feature_next (&arc_next, b0);
+ vnet_buffer2 (b0)->nat.arc_next = arc_next;
if (ip0->protocol != IP_PROTOCOL_ICMP)
{
@@ -341,9 +449,6 @@ nat44_ed_classify_node_fn_inline (vlib_main_t * vm,
NAT44_CLASSIFY_ERROR_NEXT_IN2OUT, next_in2out);
vlib_node_increment_counter (vm, node->node_index,
NAT44_CLASSIFY_ERROR_NEXT_OUT2IN, next_out2in);
- vlib_node_increment_counter (vm, node->node_index,
- NAT44_CLASSIFY_ERROR_FRAG_CACHED, frag_cached);
-
return frame->n_vectors;
}
@@ -414,21 +519,16 @@ VLIB_NODE_FN (nat44_handoff_classify_node) (vlib_main_t * vm,
vlib_node_runtime_t * node,
vlib_frame_t * frame)
{
- return nat44_classify_node_fn_inline (vm, node, frame);
+ return nat44_handoff_classify_node_fn_inline (vm, node, frame);
}
/* *INDENT-OFF* */
VLIB_REGISTER_NODE (nat44_handoff_classify_node) = {
.name = "nat44-handoff-classify",
.vector_size = sizeof (u32),
+ .sibling_of = "nat-default",
.format_trace = format_nat44_classify_trace,
.type = VLIB_NODE_TYPE_INTERNAL,
- .n_next_nodes = NAT44_CLASSIFY_N_NEXT,
- .next_nodes = {
- [NAT44_CLASSIFY_NEXT_IN2OUT] = "nat44-in2out-worker-handoff",
- [NAT44_CLASSIFY_NEXT_OUT2IN] = "nat44-out2in-worker-handoff",
- [NAT44_CLASSIFY_NEXT_DROP] = "error-drop",
- },
};
/* *INDENT-ON* */
diff --git a/src/plugins/nat/nat44_handoff.c b/src/plugins/nat/nat44_handoff.c
index d221e5ba7cd..69b3b6083a7 100644
--- a/src/plugins/nat/nat44_handoff.c
+++ b/src/plugins/nat/nat44_handoff.c
@@ -100,6 +100,7 @@ nat44_worker_handoff_fn_inline (vlib_main_t * vm,
while (n_left_from >= 4)
{
+ u32 arc_next0, arc_next1, arc_next2, arc_next3;
u32 sw_if_index0, sw_if_index1, sw_if_index2, sw_if_index3;
u32 rx_fib_index0, rx_fib_index1, rx_fib_index2, rx_fib_index3;
u32 iph_offset0 = 0, iph_offset1 = 0, iph_offset2 = 0, iph_offset3 = 0;
@@ -134,6 +135,16 @@ nat44_worker_handoff_fn_inline (vlib_main_t * vm,
ip3 = (ip4_header_t *) ((u8 *) vlib_buffer_get_current (b[3]) +
iph_offset3);
+ vnet_feature_next (&arc_next0, b[0]);
+ vnet_feature_next (&arc_next1, b[1]);
+ vnet_feature_next (&arc_next2, b[2]);
+ vnet_feature_next (&arc_next3, b[3]);
+
+ vnet_buffer2 (b[0])->nat.arc_next = arc_next0;
+ vnet_buffer2 (b[1])->nat.arc_next = arc_next1;
+ vnet_buffer2 (b[2])->nat.arc_next = arc_next2;
+ vnet_buffer2 (b[3])->nat.arc_next = arc_next3;
+
sw_if_index0 = vnet_buffer (b[0])->sw_if_index[VLIB_RX];
sw_if_index1 = vnet_buffer (b[1])->sw_if_index[VLIB_RX];
sw_if_index2 = vnet_buffer (b[2])->sw_if_index[VLIB_RX];
@@ -186,6 +197,7 @@ nat44_worker_handoff_fn_inline (vlib_main_t * vm,
while (n_left_from > 0)
{
+ u32 arc_next0;
u32 sw_if_index0;
u32 rx_fib_index0;
u32 iph_offset0 = 0;
@@ -198,6 +210,9 @@ nat44_worker_handoff_fn_inline (vlib_main_t * vm,
ip0 = (ip4_header_t *) ((u8 *) vlib_buffer_get_current (b[0]) +
iph_offset0);
+ vnet_feature_next (&arc_next0, b[0]);
+ vnet_buffer2 (b[0])->nat.arc_next = arc_next0;
+
sw_if_index0 = vnet_buffer (b[0])->sw_if_index[VLIB_RX];
rx_fib_index0 = ip4_fib_table_get_index_for_sw_if_index (sw_if_index0);
@@ -262,8 +277,6 @@ nat44_worker_handoff_fn_inline (vlib_main_t * vm,
return frame->n_vectors;
}
-
-
VLIB_NODE_FN (snat_in2out_worker_handoff_node) (vlib_main_t * vm,
vlib_node_runtime_t * node,
vlib_frame_t * frame)
diff --git a/src/plugins/nat/nat_inlines.h b/src/plugins/nat/nat_inlines.h
index a0803566ccc..a5fa84dcfa9 100644
--- a/src/plugins/nat/nat_inlines.h
+++ b/src/plugins/nat/nat_inlines.h
@@ -89,12 +89,14 @@ nat_pre_node_fn_inline (vlib_main_t * vm,
nat_pre_trace_t *t =
vlib_add_trace (vm, node, b0, sizeof (*t));
t->next_index = next0;
+ t->arc_next_index = arc_next0;
}
if (b1->flags & VLIB_BUFFER_IS_TRACED)
{
nat_pre_trace_t *t =
vlib_add_trace (vm, node, b0, sizeof (*t));
- t->next_index = next0;
+ t->next_index = next1;
+ t->arc_next_index = arc_next1;
}
}
@@ -129,6 +131,7 @@ nat_pre_node_fn_inline (vlib_main_t * vm,
{
nat_pre_trace_t *t = vlib_add_trace (vm, node, b0, sizeof (*t));
t->next_index = next0;
+ t->arc_next_index = arc_next0;
}
/* verify speculative enqueue, maybe switch current next frame */
diff --git a/src/plugins/nat/out2in_ed.c b/src/plugins/nat/out2in_ed.c
index 667d33e6266..a30ffd972d1 100644
--- a/src/plugins/nat/out2in_ed.c
+++ b/src/plugins/nat/out2in_ed.c
@@ -1074,7 +1074,6 @@ nat44_ed_out2in_slow_path_node_fn_inline (vlib_main_t * vm,
next0 = NAT_NEXT_IN2OUT_ED_FAST_PATH;
goto trace0;
}
- // TEST:
if (sm->num_workers > 1)
create_bypass_for_fwd_worker (sm, b0, ip0,
rx_fib_index0);
@@ -1298,7 +1297,8 @@ format_nat_pre_trace (u8 * s, va_list * args)
CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
nat_pre_trace_t *t = va_arg (*args, nat_pre_trace_t *);
- return format (s, "out2in next_index %d", t->next_index);
+ return format (s, "out2in next_index %d arc_next_index %d", t->next_index,
+ t->arc_next_index);
}
VLIB_NODE_FN (nat_pre_out2in_node) (vlib_main_t * vm,
diff --git a/src/vnet/feature/registration.c b/src/vnet/feature/registration.c
index 030486a7489..537a4ada6e4 100644
--- a/src/vnet/feature/registration.c
+++ b/src/vnet/feature/registration.c
@@ -50,7 +50,7 @@
<CODE><PRE>
VNET_FEATURE_INIT (ip4_lookup, static) =
{
- .arch_name = "ip4-unicast",
+ .arc_name = "ip4-unicast",
.node_name = "my-ip4-unicast-feature",
.runs_before = VLIB_FEATURES ("ip4-lookup")
};