Diffstat (limited to 'src/plugins/nat/nat_inlines.h')
-rw-r--r--  src/plugins/nat/nat_inlines.h  134
 1 file changed, 127 insertions(+), 7 deletions(-)
diff --git a/src/plugins/nat/nat_inlines.h b/src/plugins/nat/nat_inlines.h
index a7ab235f40e..2f68ed4a700 100644
--- a/src/plugins/nat/nat_inlines.h
+++ b/src/plugins/nat/nat_inlines.h
@@ -23,6 +23,126 @@
#include <nat/nat.h>
#include <nat/nat_ha.h>
+static inline uword
+nat_pre_node_fn_inline (vlib_main_t * vm,
+                        vlib_node_runtime_t * node,
+                        vlib_frame_t * frame, u32 def_next)
+{
+  u32 n_left_from, *from, *to_next;
+  u16 next_index;
+
+  from = vlib_frame_vector_args (frame);
+  n_left_from = frame->n_vectors;
+  next_index = node->cached_next_index;
+
+  while (n_left_from > 0)
+    {
+      u32 n_left_to_next;
+
+      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
+
+      while (n_left_from >= 4 && n_left_to_next >= 2)
+        {
+          u32 next0, next1;
+          u32 arc_next0, arc_next1;
+          u32 bi0, bi1;
+          vlib_buffer_t *b0, *b1;
+
+          /* Prefetch next iteration. */
+          {
+            vlib_buffer_t *p2, *p3;
+
+            p2 = vlib_get_buffer (vm, from[2]);
+            p3 = vlib_get_buffer (vm, from[3]);
+
+            vlib_prefetch_buffer_header (p2, LOAD);
+            vlib_prefetch_buffer_header (p3, LOAD);
+
+            CLIB_PREFETCH (p2->data, CLIB_CACHE_LINE_BYTES, STORE);
+            CLIB_PREFETCH (p3->data, CLIB_CACHE_LINE_BYTES, STORE);
+          }
+
+          /* speculatively enqueue b0 and b1 to the current next frame */
+          to_next[0] = bi0 = from[0];
+          to_next[1] = bi1 = from[1];
+          from += 2;
+          to_next += 2;
+          n_left_from -= 2;
+          n_left_to_next -= 2;
+
+          b0 = vlib_get_buffer (vm, bi0);
+          b1 = vlib_get_buffer (vm, bi1);
+
+          next0 = def_next;
+          next1 = def_next;
+
+          vnet_feature_next (&arc_next0, b0);
+          vnet_feature_next (&arc_next1, b1);
+
+          nat_buffer_opaque (b0)->arc_next = arc_next0;
+          nat_buffer_opaque (b1)->arc_next = arc_next1;
+
+          if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)))
+            {
+              if (b0->flags & VLIB_BUFFER_IS_TRACED)
+                {
+                  nat_pre_trace_t *t =
+                    vlib_add_trace (vm, node, b0, sizeof (*t));
+                  t->next_index = next0;
+                }
+              if (b1->flags & VLIB_BUFFER_IS_TRACED)
+                {
+                  nat_pre_trace_t *t =
+                    vlib_add_trace (vm, node, b1, sizeof (*t));
+                  t->next_index = next1;
+                }
+            }
+
+          /* verify speculative enqueues, maybe switch current next frame */
+          vlib_validate_buffer_enqueue_x2 (vm, node, next_index,
+                                           to_next, n_left_to_next,
+                                           bi0, bi1, next0, next1);
+        }
+
+      while (n_left_from > 0 && n_left_to_next > 0)
+        {
+          u32 next0;
+          u32 arc_next0;
+          u32 bi0;
+          vlib_buffer_t *b0;
+
+          /* speculatively enqueue b0 to the current next frame */
+          bi0 = from[0];
+          to_next[0] = bi0;
+          from += 1;
+          to_next += 1;
+          n_left_from -= 1;
+          n_left_to_next -= 1;
+
+          b0 = vlib_get_buffer (vm, bi0);
+          next0 = def_next;
+          vnet_feature_next (&arc_next0, b0);
+          nat_buffer_opaque (b0)->arc_next = arc_next0;
+
+          if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
+                             && (b0->flags & VLIB_BUFFER_IS_TRACED)))
+            {
+              nat_pre_trace_t *t = vlib_add_trace (vm, node, b0, sizeof (*t));
+              t->next_index = next0;
+            }
+
+          /* verify speculative enqueue, maybe switch current next frame */
+          vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
+                                           to_next, n_left_to_next,
+                                           bi0, next0);
+        }
+
+      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
+    }
+
+  return frame->n_vectors;
+}
+
always_inline u32
ip_proto_to_snat_proto (u8 ip_proto)
{
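Note (not part of the diff): a minimal sketch of how a graph node typically wraps nat_pre_node_fn_inline. The node name nat_pre_in2out_node and the default next index NAT_NEXT_IN2OUT_ED_FAST_PATH are illustrative assumptions, not taken from this change.

VLIB_NODE_FN (nat_pre_in2out_node) (vlib_main_t * vm,
                                    vlib_node_runtime_t * node,
                                    vlib_frame_t * frame)
{
  /* All buffers go to the assumed default next index; the inline above
     has already stored each buffer's feature-arc next in the NAT buffer
     opaque for downstream nodes. */
  return nat_pre_node_fn_inline (vm, node, frame,
                                 NAT_NEXT_IN2OUT_ED_FAST_PATH);
}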
@@ -552,13 +672,13 @@ snat_not_translate_fast (snat_main_t * sm, vlib_node_runtime_t * node,
return 1;
snat_interface_t *i;
- pool_foreach (i, sm->interfaces, (
- {
- /* NAT packet aimed at outside interface */
- if ((nat_interface_is_outside (i))
- && (sw_if_index ==
- i->sw_if_index)) return 0;}
- ));
+      /* *INDENT-OFF* */
+      pool_foreach (i, sm->interfaces, ({
+        /* NAT packet aimed at outside interface */
+        if ((nat_interface_is_outside (i)) && (sw_if_index == i->sw_if_index))
+          return 0;
+      }));
+      /* *INDENT-ON* */
}
return 1;
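Note (not part of the diff): the pool_foreach form used above takes the loop body as a ({ ... }) block, which is why the *INDENT-OFF*/*INDENT-ON* markers are needed to keep GNU indent from mangling it. A hypothetical helper in the same style, counting NAT outside interfaces (the function name is invented for illustration):

static inline u32
nat_count_outside_interfaces (snat_main_t * sm)
{
  snat_interface_t *i;
  u32 n = 0;

  /* *INDENT-OFF* */
  pool_foreach (i, sm->interfaces, ({
    /* interfaces flagged as NAT outside */
    if (nat_interface_is_outside (i))
      n++;
  }));
  /* *INDENT-ON* */

  return n;
}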