aboutsummaryrefslogtreecommitdiffstats
path: root/src/vnet/classify
diff options
context:
space:
mode:
Diffstat (limited to 'src/vnet/classify')
-rw-r--r--src/vnet/classify/classify.api40
-rw-r--r--src/vnet/classify/classify_api.c137
-rw-r--r--src/vnet/classify/flow_classify.c4
-rw-r--r--src/vnet/classify/flow_classify_node.c8
-rw-r--r--src/vnet/classify/in_out_acl.c132
-rw-r--r--src/vnet/classify/in_out_acl.h16
-rw-r--r--src/vnet/classify/ip_classify.c8
-rw-r--r--src/vnet/classify/pcap_classify.h6
-rw-r--r--src/vnet/classify/policer_classify.c4
-rw-r--r--src/vnet/classify/trace_classify.h5
-rw-r--r--src/vnet/classify/vnet_classify.c174
-rw-r--r--src/vnet/classify/vnet_classify.h343
12 files changed, 529 insertions, 348 deletions
diff --git a/src/vnet/classify/classify.api b/src/vnet/classify/classify.api
index c569fe6a599..00963f6fb6a 100644
--- a/src/vnet/classify/classify.api
+++ b/src/vnet/classify/classify.api
@@ -420,6 +420,46 @@ autoreply define input_acl_set_interface
bool is_add;
};
+/** \brief Add/del punt ACL
+ @param client_index - opaque cookie to identify the sender
+ @param context - sender context, to match reply w/ request
+ @param ip4_table_index - ip4 punt classify table index (~0 for skip)
+ @param ip6_table_index - ip6 punt classify table index (~0 for skip)
+ @param is_add - add punt ACL if non-zero, else delete
+*/
+autoreply define punt_acl_add_del
+{
+ u32 client_index;
+ u32 context;
+ u32 ip4_table_index [default=0xffffffff];
+ u32 ip6_table_index [default=0xffffffff];
+ bool is_add [default=true];
+};
+
+/** \brief Get classify table ids configured for punt ACL
+ @param client_index - opaque cookie to identify the sender
+ @param context - sender context, to match reply w/ request
+*/
+define punt_acl_get
+{
+ u32 client_index;
+ u32 context;
+};
+
+/** \brief Reply for punt_acl_get
+ @param context - sender context which was passed in the request
+ @param retval - return value (0 for success)
+ @param ip4_table_index - ip4 punt classify table index (~0 for none)
+ @param ip6_table_index - ip6 punt classify table index (~0 for none)
+*/
+define punt_acl_get_reply
+{
+ u32 context;
+ i32 retval;
+ u32 ip4_table_index;
+ u32 ip6_table_index;
+};
+
/** \brief Set/unset output ACL interface
@param client_index - opaque cookie to identify the sender
@param context - sender context, to match reply w/ request
diff --git a/src/vnet/classify/classify_api.c b/src/vnet/classify/classify_api.c
index 39f7b98007d..fc57b006d37 100644
--- a/src/vnet/classify/classify_api.c
+++ b/src/vnet/classify/classify_api.c
@@ -91,7 +91,8 @@ static void vl_api_classify_pcap_lookup_table_t_handler
out:
rmp = vl_msg_api_alloc (sizeof (*rmp));
- rmp->_vl_msg_id = ntohs (VL_API_CLASSIFY_PCAP_LOOKUP_TABLE_REPLY);
+ rmp->_vl_msg_id =
+ ntohs (REPLY_MSG_ID_BASE + VL_API_CLASSIFY_PCAP_LOOKUP_TABLE_REPLY);
rmp->context = mp->context;
rmp->retval = ntohl (rv);
rmp->table_index = htonl (table_index);
@@ -114,9 +115,8 @@ static void vl_api_classify_pcap_set_table_t_handler
u32 table_index = ntohl (mp->table_index);
u32 sw_if_index = ntohl (mp->sw_if_index);
- if (sw_if_index == ~0
- || sw_if_index >= vec_len (cm->classify_table_index_by_sw_if_index)
- || (table_index != ~0 && pool_is_free_index (cm->tables, table_index)))
+ if (sw_if_index == ~0 ||
+ (table_index != ~0 && pool_is_free_index (cm->tables, table_index)))
{
rv = VNET_API_ERROR_INVALID_VALUE;
goto out;
@@ -132,7 +132,8 @@ static void vl_api_classify_pcap_set_table_t_handler
out:
rmp = vl_msg_api_alloc (sizeof (*rmp));
- rmp->_vl_msg_id = ntohs (VL_API_CLASSIFY_PCAP_SET_TABLE_REPLY);
+ rmp->_vl_msg_id =
+ ntohs (REPLY_MSG_ID_BASE + VL_API_CLASSIFY_PCAP_SET_TABLE_REPLY);
rmp->context = mp->context;
rmp->retval = ntohl (rv);
rmp->table_index = htonl (table_index);
@@ -181,7 +182,8 @@ static void vl_api_classify_pcap_get_tables_t_handler
out:
count = vec_len (tables);
rmp = vl_msg_api_alloc_as_if_client (sizeof (*rmp) + count * sizeof (u32));
- rmp->_vl_msg_id = ntohs (VL_API_CLASSIFY_PCAP_GET_TABLES_REPLY);
+ rmp->_vl_msg_id =
+ ntohs (REPLY_MSG_ID_BASE + VL_API_CLASSIFY_PCAP_GET_TABLES_REPLY);
rmp->context = mp->context;
rmp->retval = ntohl (rv);
rmp->count = htonl (count);
@@ -233,7 +235,8 @@ static void vl_api_classify_trace_lookup_table_t_handler
out:
rmp = vl_msg_api_alloc (sizeof (*rmp));
- rmp->_vl_msg_id = ntohs ((VL_API_CLASSIFY_TRACE_LOOKUP_TABLE_REPLY));
+ rmp->_vl_msg_id =
+ ntohs ((REPLY_MSG_ID_BASE + VL_API_CLASSIFY_TRACE_LOOKUP_TABLE_REPLY));
rmp->context = mp->context;
rmp->retval = ntohl (rv);
rmp->table_index = htonl (table_index);
@@ -270,7 +273,8 @@ static void vl_api_classify_trace_set_table_t_handler
out:
rmp = vl_msg_api_alloc (sizeof (*rmp));
- rmp->_vl_msg_id = ntohs ((VL_API_CLASSIFY_TRACE_SET_TABLE_REPLY));
+ rmp->_vl_msg_id =
+ ntohs ((REPLY_MSG_ID_BASE + VL_API_CLASSIFY_TRACE_SET_TABLE_REPLY));
rmp->context = mp->context;
rmp->retval = ntohl (rv);
rmp->table_index = htonl (table_index);
@@ -311,7 +315,8 @@ static void vl_api_classify_trace_get_tables_t_handler
out:
count = vec_len (tables);
rmp = vl_msg_api_alloc_as_if_client (sizeof (*rmp) + count * sizeof (u32));
- rmp->_vl_msg_id = ntohs (VL_API_CLASSIFY_TRACE_GET_TABLES_REPLY);
+ rmp->_vl_msg_id =
+ ntohs (REPLY_MSG_ID_BASE + VL_API_CLASSIFY_TRACE_GET_TABLES_REPLY);
rmp->context = mp->context;
rmp->retval = ntohl (rv);
rmp->count = htonl (count);
@@ -374,7 +379,6 @@ static void vl_api_classify_add_del_table_t_handler
current_data_flag, current_data_offset, mp->is_add, mp->del_chain);
out:
- /* *INDENT-OFF* */
REPLY_MACRO2(VL_API_CLASSIFY_ADD_DEL_TABLE_REPLY,
({
if (rv == 0 && mp->is_add)
@@ -391,7 +395,6 @@ out:
rmp->new_table_index = ~0;
}
}));
- /* *INDENT-ON* */
}
static void vl_api_classify_add_del_session_t_handler
@@ -469,7 +472,7 @@ send_policer_classify_details (u32 sw_if_index,
mp = vl_msg_api_alloc (sizeof (*mp));
clib_memset (mp, 0, sizeof (*mp));
- mp->_vl_msg_id = ntohs (VL_API_POLICER_CLASSIFY_DETAILS);
+ mp->_vl_msg_id = ntohs (REPLY_MSG_ID_BASE + VL_API_POLICER_CLASSIFY_DETAILS);
mp->context = context;
mp->sw_if_index = htonl (sw_if_index);
mp->table_index = htonl (table_index);
@@ -528,17 +531,16 @@ vl_api_classify_table_ids_t_handler (vl_api_classify_table_ids_t * mp)
u32 *table_ids = 0;
u32 count;
- /* *INDENT-OFF* */
pool_foreach (t, cm->tables)
{
vec_add1 (table_ids, ntohl(t - cm->tables));
}
- /* *INDENT-ON* */
count = vec_len (table_ids);
vl_api_classify_table_ids_reply_t *rmp;
rmp = vl_msg_api_alloc_as_if_client (sizeof (*rmp) + count * sizeof (u32));
- rmp->_vl_msg_id = ntohs (VL_API_CLASSIFY_TABLE_IDS_REPLY);
+ rmp->_vl_msg_id =
+ ntohs (REPLY_MSG_ID_BASE + VL_API_CLASSIFY_TABLE_IDS_REPLY);
rmp->context = mp->context;
rmp->count = ntohl (count);
clib_memcpy (rmp->ids, table_ids, count * sizeof (u32));
@@ -589,7 +591,6 @@ static void
BAD_SW_IF_INDEX_LABEL;
- /* *INDENT-OFF* */
REPLY_MACRO2(VL_API_CLASSIFY_TABLE_BY_INTERFACE_REPLY,
({
rmp->sw_if_index = ntohl(sw_if_index);
@@ -597,7 +598,6 @@ static void
rmp->ip4_table_id = ntohl(acl[IN_OUT_ACL_TABLE_IP4]);
rmp->ip6_table_id = ntohl(acl[IN_OUT_ACL_TABLE_IP6]);
}));
- /* *INDENT-ON* */
vec_free (acl);
}
@@ -616,34 +616,35 @@ vl_api_classify_table_info_t_handler (vl_api_classify_table_info_t * mp)
u32 table_id = ntohl (mp->table_id);
vnet_classify_table_t *t;
- /* *INDENT-OFF* */
- pool_foreach (t, cm->tables)
+ pool_foreach (t, cm->tables)
{
- if (table_id == t - cm->tables)
- {
- rmp = vl_msg_api_alloc_as_if_client
- (sizeof (*rmp) + t->match_n_vectors * sizeof (u32x4));
- rmp->_vl_msg_id = ntohs (VL_API_CLASSIFY_TABLE_INFO_REPLY);
- rmp->context = mp->context;
- rmp->table_id = ntohl(table_id);
- rmp->nbuckets = ntohl(t->nbuckets);
- rmp->match_n_vectors = ntohl(t->match_n_vectors);
- rmp->skip_n_vectors = ntohl(t->skip_n_vectors);
- rmp->active_sessions = ntohl(t->active_elements);
- rmp->next_table_index = ntohl(t->next_table_index);
- rmp->miss_next_index = ntohl(t->miss_next_index);
- rmp->mask_length = ntohl(t->match_n_vectors * sizeof (u32x4));
- clib_memcpy(rmp->mask, t->mask, t->match_n_vectors * sizeof(u32x4));
- rmp->retval = 0;
- break;
- }
- }
- /* *INDENT-ON* */
+ if (table_id == t - cm->tables)
+ {
+ rmp = vl_msg_api_alloc_as_if_client (
+ sizeof (*rmp) + t->match_n_vectors * sizeof (u32x4));
+ rmp->_vl_msg_id =
+ ntohs (REPLY_MSG_ID_BASE + VL_API_CLASSIFY_TABLE_INFO_REPLY);
+ rmp->context = mp->context;
+ rmp->table_id = ntohl (table_id);
+ rmp->nbuckets = ntohl (t->nbuckets);
+ rmp->match_n_vectors = ntohl (t->match_n_vectors);
+ rmp->skip_n_vectors = ntohl (t->skip_n_vectors);
+ rmp->active_sessions = ntohl (t->active_elements);
+ rmp->next_table_index = ntohl (t->next_table_index);
+ rmp->miss_next_index = ntohl (t->miss_next_index);
+ rmp->mask_length = ntohl (t->match_n_vectors * sizeof (u32x4));
+ clib_memcpy (rmp->mask, t->mask,
+ t->match_n_vectors * sizeof (u32x4));
+ rmp->retval = 0;
+ break;
+ }
+ }
if (rmp == 0)
{
rmp = vl_msg_api_alloc (sizeof (*rmp));
- rmp->_vl_msg_id = ntohs (VL_API_CLASSIFY_TABLE_INFO_REPLY);
+ rmp->_vl_msg_id =
+ ntohs (REPLY_MSG_ID_BASE + VL_API_CLASSIFY_TABLE_INFO_REPLY);
rmp->context = mp->context;
rmp->retval = ntohl (VNET_API_ERROR_CLASSIFY_TABLE_NOT_FOUND);
}
@@ -659,9 +660,10 @@ send_classify_session_details (vl_api_registration_t * reg,
{
vl_api_classify_session_details_t *rmp;
- rmp = vl_msg_api_alloc (sizeof (*rmp));
+ rmp = vl_msg_api_alloc (sizeof (*rmp) + match_length);
clib_memset (rmp, 0, sizeof (*rmp));
- rmp->_vl_msg_id = ntohs (VL_API_CLASSIFY_SESSION_DETAILS);
+ rmp->_vl_msg_id =
+ ntohs (REPLY_MSG_ID_BASE + VL_API_CLASSIFY_SESSION_DETAILS);
rmp->context = context;
rmp->table_id = ntohl (table_id);
rmp->hit_next_index = ntohl (e->next_index);
@@ -686,7 +688,6 @@ vl_api_classify_session_dump_t_handler (vl_api_classify_session_dump_t * mp)
if (!reg)
return;
- /* *INDENT-OFF* */
pool_foreach (t, cm->tables)
{
if (table_id == t - cm->tables)
@@ -720,7 +721,6 @@ vl_api_classify_session_dump_t_handler (vl_api_classify_session_dump_t * mp)
break;
}
}
- /* *INDENT-ON* */
}
static void
@@ -755,7 +755,7 @@ send_flow_classify_details (u32 sw_if_index,
mp = vl_msg_api_alloc (sizeof (*mp));
clib_memset (mp, 0, sizeof (*mp));
- mp->_vl_msg_id = ntohs (VL_API_FLOW_CLASSIFY_DETAILS);
+ mp->_vl_msg_id = ntohs (REPLY_MSG_ID_BASE + VL_API_FLOW_CLASSIFY_DETAILS);
mp->context = context;
mp->sw_if_index = htonl (sw_if_index);
mp->table_index = htonl (table_index);
@@ -887,6 +887,43 @@ static void vl_api_input_acl_set_interface_t_handler
REPLY_MACRO (VL_API_INPUT_ACL_SET_INTERFACE_REPLY);
}
+static void
+vl_api_punt_acl_add_del_t_handler (vl_api_punt_acl_add_del_t *mp)
+{
+ vlib_main_t *vm = vlib_get_main ();
+ vl_api_punt_acl_add_del_reply_t *rmp;
+ int rv;
+
+ rv = vnet_set_in_out_acl_intfc (
+ vm, 0 /* sw_if_index */, ~0 /* ip4_table_index */,
+ ~0 /* ip6_table_index */, ~0 /* l2_table_index */,
+ ntohl (mp->ip4_table_index), ntohl (mp->ip6_table_index), mp->is_add,
+ 0 /* is_output */);
+
+ REPLY_MACRO (VL_API_PUNT_ACL_ADD_DEL_REPLY);
+}
+
+static void
+vl_api_punt_acl_get_t_handler (vl_api_punt_acl_get_t *mp)
+{
+ vl_api_punt_acl_get_reply_t *rmp;
+ int rv = 0;
+
+ const in_out_acl_main_t *am = &in_out_acl_main;
+
+ u32 *const *tables =
+ am->classify_table_index_by_sw_if_index[IN_OUT_ACL_INPUT_TABLE_GROUP];
+ const u32 *ip4_table = tables[IN_OUT_ACL_TABLE_IP4_PUNT];
+ const u32 *ip6_table = tables[IN_OUT_ACL_TABLE_IP6_PUNT];
+ const u32 ip4_table_index = vec_len (ip4_table) ? ip4_table[0] : ~0;
+ const u32 ip6_table_index = vec_len (ip6_table) ? ip6_table[0] : ~0;
+
+ REPLY_MACRO2 (VL_API_PUNT_ACL_GET_REPLY, ({
+ rmp->ip4_table_index = ntohl (ip4_table_index);
+ rmp->ip6_table_index = ntohl (ip6_table_index);
+ }));
+}
+
static void vl_api_output_acl_set_interface_t_handler
(vl_api_output_acl_set_interface_t * mp)
{
@@ -915,6 +952,16 @@ static void vl_api_output_acl_set_interface_t_handler
static clib_error_t *
classify_api_hookup (vlib_main_t * vm)
{
+ api_main_t *am = vlibapi_get_main ();
+
+ /*
+ * Trace space for classifier mask+match
+ */
+ vl_api_increase_msg_trace_size (am, VL_API_CLASSIFY_ADD_DEL_TABLE,
+ 5 * sizeof (u32x4));
+ vl_api_increase_msg_trace_size (am, VL_API_CLASSIFY_ADD_DEL_SESSION,
+ 5 * sizeof (u32x4));
+
/*
* Set up the (msg_name, crc, message-id) table
*/
diff --git a/src/vnet/classify/flow_classify.c b/src/vnet/classify/flow_classify.c
index afdadc66235..7197558a77a 100644
--- a/src/vnet/classify/flow_classify.c
+++ b/src/vnet/classify/flow_classify.c
@@ -150,7 +150,6 @@ set_flow_classify_command_fn (vlib_main_t * vm,
return 0;
}
-/* *INDENT-OFF* */
VLIB_CLI_COMMAND (set_input_acl_command, static) = {
.path = "set flow classify",
.short_help =
@@ -158,7 +157,6 @@ VLIB_CLI_COMMAND (set_input_acl_command, static) = {
" [ip6-table <index>] [del]",
.function = set_flow_classify_command_fn,
};
-/* *INDENT-ON* */
static uword
unformat_table_type (unformat_input_t * input, va_list * va)
@@ -215,13 +213,11 @@ show_flow_classify_command_fn (vlib_main_t * vm,
return 0;
}
-/* *INDENT-OFF* */
VLIB_CLI_COMMAND (show_flow_classify_command, static) = {
.path = "show classify flow",
.short_help = "show classify flow type [ip4|ip6]",
.function = show_flow_classify_command_fn,
};
-/* *INDENT-ON* */
/*
* fd.io coding-style-patch-verification: ON
diff --git a/src/vnet/classify/flow_classify_node.c b/src/vnet/classify/flow_classify_node.c
index 4989bf0a012..a34bab6190b 100644
--- a/src/vnet/classify/flow_classify_node.c
+++ b/src/vnet/classify/flow_classify_node.c
@@ -184,7 +184,7 @@ flow_classify_inline (vlib_main_t * vm,
u32 table_index0;
vnet_classify_table_t *t0;
vnet_classify_entry_t *e0;
- u64 hash0;
+ u32 hash0;
u8 *h0;
/* Stride 3 seems to work best */
@@ -193,7 +193,7 @@ flow_classify_inline (vlib_main_t * vm,
vlib_buffer_t *p1 = vlib_get_buffer (vm, from[3]);
vnet_classify_table_t *tp1;
u32 table_index1;
- u64 phash1;
+ u32 phash1;
table_index1 = vnet_buffer (p1)->l2_classify.table_index;
@@ -279,7 +279,6 @@ VLIB_NODE_FN (ip4_flow_classify_node) (vlib_main_t * vm,
return flow_classify_inline (vm, node, frame, FLOW_CLASSIFY_TABLE_IP4);
}
-/* *INDENT-OFF* */
VLIB_REGISTER_NODE (ip4_flow_classify_node) = {
.name = "ip4-flow-classify",
.vector_size = sizeof (u32),
@@ -291,7 +290,6 @@ VLIB_REGISTER_NODE (ip4_flow_classify_node) = {
[FLOW_CLASSIFY_NEXT_INDEX_DROP] = "error-drop",
},
};
-/* *INDENT-ON* */
VLIB_NODE_FN (ip6_flow_classify_node) (vlib_main_t * vm,
vlib_node_runtime_t * node,
@@ -300,7 +298,6 @@ VLIB_NODE_FN (ip6_flow_classify_node) (vlib_main_t * vm,
return flow_classify_inline (vm, node, frame, FLOW_CLASSIFY_TABLE_IP6);
}
-/* *INDENT-OFF* */
VLIB_REGISTER_NODE (ip6_flow_classify_node) = {
.name = "ip6-flow-classify",
.vector_size = sizeof (u32),
@@ -313,7 +310,6 @@ VLIB_REGISTER_NODE (ip6_flow_classify_node) = {
},
};
-/* *INDENT-ON* */
static clib_error_t *
diff --git a/src/vnet/classify/in_out_acl.c b/src/vnet/classify/in_out_acl.c
index 7f5a926212c..af765139332 100644
--- a/src/vnet/classify/in_out_acl.c
+++ b/src/vnet/classify/in_out_acl.c
@@ -21,63 +21,75 @@
in_out_acl_main_t in_out_acl_main;
static int
-vnet_in_out_acl_ip_feature_enable (vlib_main_t * vnm,
- in_out_acl_main_t * am,
- u32 sw_if_index,
- in_out_acl_table_id_t tid,
- int feature_enable, int is_output)
+vnet_in_out_acl_feature_enable (in_out_acl_main_t *am, u32 sw_if_index,
+ in_out_acl_table_id_t tid, int feature_enable,
+ int is_output)
{
+ const char *arc_name, *feature_name;
+ vnet_feature_config_main_t *fcm;
+ u8 arc;
+ int rv;
- if (tid == IN_OUT_ACL_TABLE_L2)
+ switch (tid)
{
+ case IN_OUT_ACL_N_TABLES:
+ return VNET_API_ERROR_NO_SUCH_TABLE;
+ case IN_OUT_ACL_TABLE_L2:
if (is_output)
l2output_intf_bitmap_enable (sw_if_index, L2OUTPUT_FEAT_ACL,
feature_enable);
else
l2input_intf_bitmap_enable (sw_if_index, L2INPUT_FEAT_ACL,
feature_enable);
+ return 0;
+ case IN_OUT_ACL_TABLE_IP4:
+ arc_name = is_output ? "ip4-output" : "ip4-unicast";
+ feature_name = is_output ? "ip4-outacl" : "ip4-inacl";
+ break;
+ case IN_OUT_ACL_TABLE_IP6:
+ arc_name = is_output ? "ip6-output" : "ip6-unicast";
+ feature_name = is_output ? "ip6-outacl" : "ip6-inacl";
+ break;
+ case IN_OUT_ACL_TABLE_IP4_PUNT:
+ if (sw_if_index != 0)
+ return VNET_API_ERROR_INVALID_INTERFACE;
+ arc_name = "ip4-punt";
+ feature_name = "ip4-punt-acl";
+ break;
+ case IN_OUT_ACL_TABLE_IP6_PUNT:
+ if (sw_if_index != 0)
+ return VNET_API_ERROR_INVALID_INTERFACE;
+ arc_name = "ip6-punt";
+ feature_name = "ip6-punt-acl";
+ break;
}
- else
- { /* IP[46] */
- vnet_feature_config_main_t *fcm;
- u8 arc;
- if (tid == IN_OUT_ACL_TABLE_IP4)
- {
- char *arc_name = is_output ? "ip4-output" : "ip4-unicast";
- vnet_feature_enable_disable (arc_name,
- is_output ? "ip4-outacl" : "ip4-inacl",
- sw_if_index, feature_enable, 0, 0);
- arc = vnet_get_feature_arc_index (arc_name);
- }
- else
- {
- char *arc_name = is_output ? "ip6-output" : "ip6-unicast";
- vnet_feature_enable_disable (arc_name,
- is_output ? "ip6-outacl" : "ip6-inacl",
- sw_if_index, feature_enable, 0, 0);
- arc = vnet_get_feature_arc_index (arc_name);
- }
+ rv = vnet_feature_enable_disable (arc_name, feature_name, sw_if_index,
+ feature_enable, 0, 0);
+ if (rv)
+ return rv;
- fcm = vnet_get_feature_arc_config_main (arc);
- am->vnet_config_main[is_output][tid] = &fcm->config_main;
- }
+ arc = vnet_get_feature_arc_index (arc_name);
+ fcm = vnet_get_feature_arc_config_main (arc);
+ am->vnet_config_main[is_output][tid] = &fcm->config_main;
return 0;
}
int
-vnet_set_in_out_acl_intfc (vlib_main_t * vm, u32 sw_if_index,
- u32 ip4_table_index,
- u32 ip6_table_index, u32 l2_table_index,
- u32 is_add, u32 is_output)
+vnet_set_in_out_acl_intfc (vlib_main_t *vm, u32 sw_if_index,
+ u32 ip4_table_index, u32 ip6_table_index,
+ u32 l2_table_index, u32 ip4_punt_table_index,
+ u32 ip6_punt_table_index, u32 is_add, u32 is_output)
{
in_out_acl_main_t *am = &in_out_acl_main;
vnet_classify_main_t *vcm = am->vnet_classify_main;
- u32 acl[IN_OUT_ACL_N_TABLES] = { ip4_table_index, ip6_table_index,
- l2_table_index
+ u32 acl[IN_OUT_ACL_N_TABLES] = {
+ ip4_table_index, ip6_table_index, l2_table_index,
+ ip4_punt_table_index, ip6_punt_table_index,
};
u32 ti;
+ int rv;
/* Assume that we've validated sw_if_index in the API layer */
@@ -111,8 +123,10 @@ vnet_set_in_out_acl_intfc (vlib_main_t * vm, u32 sw_if_index,
!= ~0)
return 0;
- vnet_in_out_acl_ip_feature_enable (vm, am, sw_if_index, ti, is_add,
- is_output);
+ rv = vnet_in_out_acl_feature_enable (am, sw_if_index, ti, is_add,
+ is_output);
+ if (rv)
+ return rv;
if (is_add)
am->classify_table_index_by_sw_if_index[is_output][ti][sw_if_index] =
@@ -130,9 +144,10 @@ vnet_set_input_acl_intfc (vlib_main_t * vm, u32 sw_if_index,
u32 ip4_table_index,
u32 ip6_table_index, u32 l2_table_index, u32 is_add)
{
- return vnet_set_in_out_acl_intfc (vm, sw_if_index, ip4_table_index,
- ip6_table_index, l2_table_index, is_add,
- IN_OUT_ACL_INPUT_TABLE_GROUP);
+ return vnet_set_in_out_acl_intfc (
+ vm, sw_if_index, ip4_table_index, ip6_table_index, l2_table_index,
+ ~0 /* ip4_punt_table_index */, ~0 /* ip6_punt_table_index */, is_add,
+ IN_OUT_ACL_INPUT_TABLE_GROUP);
}
int
@@ -141,9 +156,10 @@ vnet_set_output_acl_intfc (vlib_main_t * vm, u32 sw_if_index,
u32 ip6_table_index, u32 l2_table_index,
u32 is_add)
{
- return vnet_set_in_out_acl_intfc (vm, sw_if_index, ip4_table_index,
- ip6_table_index, l2_table_index, is_add,
- IN_OUT_ACL_OUTPUT_TABLE_GROUP);
+ return vnet_set_in_out_acl_intfc (
+ vm, sw_if_index, ip4_table_index, ip6_table_index, l2_table_index,
+ ~0 /* ip4_punt_table_index */, ~0 /* ip6_punt_table_index */, is_add,
+ IN_OUT_ACL_OUTPUT_TABLE_GROUP);
}
static clib_error_t *
@@ -155,6 +171,8 @@ set_in_out_acl_command_fn (vlib_main_t * vm,
u32 sw_if_index = ~0;
u32 ip4_table_index = ~0;
u32 ip6_table_index = ~0;
+ u32 ip4_punt_table_index = ~0;
+ u32 ip6_punt_table_index = ~0;
u32 l2_table_index = ~0;
u32 is_add = 1;
u32 idx_cnt = 0;
@@ -169,6 +187,10 @@ set_in_out_acl_command_fn (vlib_main_t * vm,
idx_cnt++;
else if (unformat (input, "ip6-table %d", &ip6_table_index))
idx_cnt++;
+ else if (unformat (input, "ip4-punt-table %d", &ip4_punt_table_index))
+ idx_cnt++;
+ else if (unformat (input, "ip6-punt-table %d", &ip6_punt_table_index))
+ idx_cnt++;
else if (unformat (input, "l2-table %d", &l2_table_index))
idx_cnt++;
else if (unformat (input, "del"))
@@ -186,9 +208,9 @@ set_in_out_acl_command_fn (vlib_main_t * vm,
if (idx_cnt > 1)
return clib_error_return (0, "Only one table index per API is allowed.");
- rv = vnet_set_in_out_acl_intfc (vm, sw_if_index, ip4_table_index,
- ip6_table_index, l2_table_index, is_add,
- is_output);
+ rv = vnet_set_in_out_acl_intfc (
+ vm, sw_if_index, ip4_table_index, ip6_table_index, l2_table_index,
+ ip4_punt_table_index, ip6_punt_table_index, is_add, is_output);
switch (rv)
{
@@ -200,6 +222,9 @@ set_in_out_acl_command_fn (vlib_main_t * vm,
case VNET_API_ERROR_NO_SUCH_ENTRY:
return clib_error_return (0, "No such classifier table");
+
+ default:
+ return clib_error_return (0, "Error: %d", rv);
}
return 0;
}
@@ -230,13 +255,13 @@ set_output_acl_command_fn (vlib_main_t * vm,
* Note: Only one table index per API call is allowed.
*
*/
-/* *INDENT-OFF* */
VLIB_CLI_COMMAND (set_input_acl_command, static) = {
- .path = "set interface input acl",
- .short_help =
+ .path = "set interface input acl",
+ .short_help =
"set interface input acl intfc <int> [ip4-table <index>]\n"
- " [ip6-table <index>] [l2-table <index>] [del]",
- .function = set_input_acl_command_fn,
+ " [ip6-table <index>] [l2-table <index>] [ip4-punt-table <index>]\n"
+      "	 [ip6-punt-table <index>] [del]",
+ .function = set_input_acl_command_fn,
};
VLIB_CLI_COMMAND (set_output_acl_command, static) = {
.path = "set interface output acl",
@@ -245,7 +270,6 @@ VLIB_CLI_COMMAND (set_output_acl_command, static) = {
" [ip6-table <index>] [l2-table <index>] [del]",
.function = set_output_acl_command_fn,
};
-/* *INDENT-ON* */
clib_error_t *
in_out_acl_init (vlib_main_t * vm)
@@ -258,12 +282,10 @@ in_out_acl_init (vlib_main_t * vm)
return 0;
}
-/* *INDENT-OFF* */
VLIB_INIT_FUNCTION (in_out_acl_init) =
{
.runs_after = VLIB_INITS("ip_in_out_acl_init"),
};
-/* *INDENT-ON* */
uword
unformat_acl_type (unformat_input_t * input, va_list * args)
@@ -366,7 +388,6 @@ show_outacl_command_fn (vlib_main_t * vm,
IN_OUT_ACL_OUTPUT_TABLE_GROUP);
}
-/* *INDENT-OFF* */
VLIB_CLI_COMMAND (show_inacl_command, static) = {
.path = "show inacl",
.short_help = "show inacl type [ip4|ip6|l2]",
@@ -377,7 +398,6 @@ VLIB_CLI_COMMAND (show_outacl_command, static) = {
.short_help = "show outacl type [ip4|ip6|l2]",
.function = show_outacl_command_fn,
};
-/* *INDENT-ON* */
/*
* fd.io coding-style-patch-verification: ON
diff --git a/src/vnet/classify/in_out_acl.h b/src/vnet/classify/in_out_acl.h
index be0323055d8..331c64f531f 100644
--- a/src/vnet/classify/in_out_acl.h
+++ b/src/vnet/classify/in_out_acl.h
@@ -31,6 +31,8 @@ typedef enum
IN_OUT_ACL_TABLE_IP4,
IN_OUT_ACL_TABLE_IP6,
IN_OUT_ACL_TABLE_L2,
+ IN_OUT_ACL_TABLE_IP4_PUNT,
+ IN_OUT_ACL_TABLE_IP6_PUNT,
IN_OUT_ACL_N_TABLES,
} in_out_acl_table_id_t;
@@ -59,14 +61,14 @@ typedef struct
extern in_out_acl_main_t in_out_acl_main;
-int vnet_set_in_out_acl_intfc (vlib_main_t * vm, u32 sw_if_index,
- u32 ip4_table_index,
- u32 ip6_table_index,
- u32 l2_table_index, u32 is_add, u32 is_output);
+int vnet_set_in_out_acl_intfc (vlib_main_t *vm, u32 sw_if_index,
+ u32 ip4_table_index, u32 ip6_table_index,
+ u32 l2_table_index, u32 ip4_punt_table_index,
+ u32 ip6_punt_table_index, u32 is_add,
+ u32 is_output);
-int vnet_set_input_acl_intfc (vlib_main_t * vm, u32 sw_if_index,
- u32 ip4_table_index,
- u32 ip6_table_index,
+int vnet_set_input_acl_intfc (vlib_main_t *vm, u32 sw_if_index,
+ u32 ip4_table_index, u32 ip6_table_index,
u32 l2_table_index, u32 is_add);
int vnet_set_output_acl_intfc (vlib_main_t * vm, u32 sw_if_index,
diff --git a/src/vnet/classify/ip_classify.c b/src/vnet/classify/ip_classify.c
index a5c044521bf..e8562c6912c 100644
--- a/src/vnet/classify/ip_classify.c
+++ b/src/vnet/classify/ip_classify.c
@@ -190,7 +190,7 @@ ip_classify_inline (vlib_main_t * vm,
u32 table_index0;
vnet_classify_table_t *t0;
vnet_classify_entry_t *e0;
- u64 hash0;
+ u32 hash0;
u8 *h0;
/* Stride 3 seems to work best */
@@ -199,7 +199,7 @@ ip_classify_inline (vlib_main_t * vm,
vlib_buffer_t *p1 = vlib_get_buffer (vm, from[3]);
vnet_classify_table_t *tp1;
u32 table_index1;
- u64 phash1;
+ u32 phash1;
table_index1 = vnet_buffer (p1)->l2_classify.table_index;
@@ -309,7 +309,6 @@ VLIB_NODE_FN (ip4_classify_node) (vlib_main_t * vm,
}
-/* *INDENT-OFF* */
VLIB_REGISTER_NODE (ip4_classify_node) = {
.name = "ip4-classify",
.vector_size = sizeof (u32),
@@ -320,7 +319,6 @@ VLIB_REGISTER_NODE (ip4_classify_node) = {
.n_next_nodes = 0,
};
-/* *INDENT-ON* */
VLIB_NODE_FN (ip6_classify_node) (vlib_main_t * vm,
vlib_node_runtime_t * node,
@@ -330,7 +328,6 @@ VLIB_NODE_FN (ip6_classify_node) (vlib_main_t * vm,
}
-/* *INDENT-OFF* */
VLIB_REGISTER_NODE (ip6_classify_node) = {
.name = "ip6-classify",
.vector_size = sizeof (u32),
@@ -341,7 +338,6 @@ VLIB_REGISTER_NODE (ip6_classify_node) = {
.n_next_nodes = 0,
};
-/* *INDENT-ON* */
#ifndef CLIB_MARCH_VARIANT
static clib_error_t *
diff --git a/src/vnet/classify/pcap_classify.h b/src/vnet/classify/pcap_classify.h
index e079816f62c..a4ebcd1241c 100644
--- a/src/vnet/classify/pcap_classify.h
+++ b/src/vnet/classify/pcap_classify.h
@@ -47,11 +47,11 @@ vnet_is_packet_pcaped (vnet_pcap_t *pp, vlib_buffer_t *b, u32 sw_if_index)
return 0; /* wrong error */
if (filter_classify_table_index != ~0 &&
- vnet_is_packet_traced_inline (b, filter_classify_table_index,
- 0 /* full classify */) != 1)
+ pp->current_filter_function (b, filter_classify_table_index,
+ 0 /* full classify */) != 1)
return 0; /* not matching the filter, skip */
- return 1; /* success */
+ return 1;
}
/*
diff --git a/src/vnet/classify/policer_classify.c b/src/vnet/classify/policer_classify.c
index 4cf12a24e9e..814adefc987 100644
--- a/src/vnet/classify/policer_classify.c
+++ b/src/vnet/classify/policer_classify.c
@@ -164,7 +164,6 @@ set_policer_classify_command_fn (vlib_main_t * vm,
return 0;
}
-/* *INDENT-OFF* */
VLIB_CLI_COMMAND (set_policer_classify_command, static) = {
.path = "set policer classify",
.short_help =
@@ -172,7 +171,6 @@ VLIB_CLI_COMMAND (set_policer_classify_command, static) = {
" [ip6-table <index>] [l2-table <index>] [del]",
.function = set_policer_classify_command_fn,
};
-/* *INDENT-ON* */
static uword
unformat_table_type (unformat_input_t * input, va_list * va)
@@ -231,13 +229,11 @@ show_policer_classify_command_fn (vlib_main_t * vm,
return 0;
}
-/* *INDENT-OFF* */
VLIB_CLI_COMMAND (show_policer_classify_command, static) = {
.path = "show classify policer",
.short_help = "show classify policer type [ip4|ip6|l2]",
.function = show_policer_classify_command_fn,
};
-/* *INDENT-ON* */
/*
* fd.io coding-style-patch-verification: ON
diff --git a/src/vnet/classify/trace_classify.h b/src/vnet/classify/trace_classify.h
index bc25ecd0ff7..03421210d03 100644
--- a/src/vnet/classify/trace_classify.h
+++ b/src/vnet/classify/trace_classify.h
@@ -29,6 +29,8 @@
* @param u32 classify_table_index - classifier table index
* @return 0 => no trace, 1 => trace, -1 => error
*/
+int vnet_is_packet_traced (vlib_buffer_t *b, u32 classify_table_index,
+ int func);
static inline int
vnet_is_packet_traced_inline (vlib_buffer_t * b,
@@ -43,6 +45,9 @@ vnet_is_packet_traced_inline (vlib_buffer_t * b,
if (func != 0)
return -1;
+ if (classify_table_index == ~0)
+ return -1;
+
/* This will happen... */
if (pool_is_free_index (vcm->tables, classify_table_index))
return -1;
diff --git a/src/vnet/classify/vnet_classify.c b/src/vnet/classify/vnet_classify.c
index d36d93b5f31..77c1c81f9c4 100644
--- a/src/vnet/classify/vnet_classify.c
+++ b/src/vnet/classify/vnet_classify.c
@@ -139,7 +139,7 @@ vnet_classify_new_table (vnet_classify_main_t *cm, const u8 *mask,
pool_get_aligned_zero (cm->tables, t, CLIB_CACHE_LINE_BYTES);
- vec_validate_aligned (t->mask, match_n_vectors - 1, sizeof (u32x4));
+ clib_memset_u32 (t->mask, 0, 4 * ARRAY_LEN (t->mask));
clib_memcpy_fast (t->mask, mask, match_n_vectors * sizeof (u32x4));
t->next_table_index = ~0;
@@ -148,6 +148,7 @@ vnet_classify_new_table (vnet_classify_main_t *cm, const u8 *mask,
t->match_n_vectors = match_n_vectors;
t->skip_n_vectors = skip_n_vectors;
t->entries_per_page = 2;
+ t->load_mask = pow2_mask (match_n_vectors * 2);
t->mheap = clib_mem_create_heap (0, memory_size, 1 /* locked */ ,
"classify");
@@ -175,7 +176,6 @@ vnet_classify_delete_table_index (vnet_classify_main_t * cm,
/* Recursively delete the entire chain */
vnet_classify_delete_table_index (cm, t->next_table_index, del_chain);
- vec_free (t->mask);
vec_free (t->buckets);
clib_mem_destroy_heap (t->mheap);
pool_put (cm->tables, t);
@@ -293,7 +293,7 @@ split_and_rehash (vnet_classify_table_t * t,
for (i = 0; i < length_in_entries; i++)
{
- u64 new_hash;
+ u32 new_hash;
v = vnet_classify_entry_at_index (t, old_values, i);
@@ -424,7 +424,7 @@ vnet_classify_add_del (vnet_classify_table_t *t, vnet_classify_entry_t *add_v,
u32 value_index;
int rv = 0;
int i;
- u64 hash, new_hash;
+ u32 hash, new_hash;
u32 limit;
u32 old_log2_pages, new_log2_pages;
u32 thread_index = vlib_get_thread_index ();
@@ -640,28 +640,26 @@ unlock:
return rv;
}
-/* *INDENT-OFF* */
typedef CLIB_PACKED(struct {
ethernet_header_t eh;
ip4_header_t ip;
}) classify_data_or_mask_t;
-/* *INDENT-ON* */
-u64
-vnet_classify_hash_packet (vnet_classify_table_t * t, u8 * h)
+u32
+vnet_classify_hash_packet (const vnet_classify_table_t *t, u8 *h)
{
return vnet_classify_hash_packet_inline (t, h);
}
vnet_classify_entry_t *
-vnet_classify_find_entry (vnet_classify_table_t * t,
- u8 * h, u64 hash, f64 now)
+vnet_classify_find_entry (const vnet_classify_table_t *t, u8 *h, u32 hash,
+ f64 now)
{
return vnet_classify_find_entry_inline (t, h, hash, now);
}
-static u8 *
-format_classify_entry (u8 * s, va_list * args)
+u8 *
+format_classify_entry (u8 *s, va_list *args)
{
vnet_classify_table_t *t = va_arg (*args, vnet_classify_table_t *);
vnet_classify_entry_t *e = va_arg (*args, vnet_classify_entry_t *);
@@ -777,8 +775,10 @@ vnet_classify_add_del_table (vnet_classify_main_t *cm, const u8 *mask,
else /* update */
{
vnet_classify_main_t *cm = &vnet_classify_main;
- t = pool_elt_at_index (cm->tables, *table_index);
+ if (pool_is_free_index (cm->tables, *table_index))
+ return VNET_API_ERROR_CLASSIFY_TABLE_NOT_FOUND;
+ t = pool_elt_at_index (cm->tables, *table_index);
t->next_table_index = next_table_index;
}
return 0;
@@ -1233,12 +1233,16 @@ unformat_classify_mask (unformat_input_t * input, va_list * args)
u8 *l2 = 0;
u8 *l3 = 0;
u8 *l4 = 0;
+ u8 add_l2 = 1;
int i;
while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
{
if (unformat (input, "hex %U", unformat_hex_string, &mask))
;
+ else if (unformat (input, "l2 none"))
+ /* Don't add the l2 header in the mask */
+ add_l2 = 0;
else if (unformat (input, "l2 %U", unformat_l2_mask, &l2))
;
else if (unformat (input, "l3 %U", unformat_l3_mask, &l3))
@@ -1249,6 +1253,15 @@ unformat_classify_mask (unformat_input_t * input, va_list * args)
break;
}
+ if (l2 && !add_l2)
+ {
+ vec_free (mask);
+ vec_free (l2);
+ vec_free (l3);
+ vec_free (l4);
+ return 0;
+ }
+
if (l4 && !l3)
{
vec_free (mask);
@@ -1261,15 +1274,20 @@ unformat_classify_mask (unformat_input_t * input, va_list * args)
{
if (l2 || l3 || l4)
{
- /* "With a free Ethernet header in every package" */
- if (l2 == 0)
- vec_validate (l2, 13);
- mask = l2;
- if (l3)
+ if (add_l2)
{
- vec_append (mask, l3);
- vec_free (l3);
+ /* "With a free Ethernet header in every package" */
+ if (l2 == 0)
+ vec_validate (l2, 13);
+ mask = l2;
+ if (l3)
+ {
+ vec_append (mask, l3);
+ vec_free (l3);
+ }
}
+ else
+ mask = l3;
if (l4)
{
vec_append (mask, l4);
@@ -1302,7 +1320,7 @@ unformat_classify_mask (unformat_input_t * input, va_list * args)
if (match == 0)
clib_warning ("BUG: match 0");
- _vec_len (mask) = match * sizeof (u32x4);
+ vec_set_len (mask, match * sizeof (u32x4));
*matchp = match;
*maskp = mask;
@@ -1313,12 +1331,11 @@ unformat_classify_mask (unformat_input_t * input, va_list * args)
return 0;
}
-#define foreach_l2_input_next \
-_(drop, DROP) \
-_(ethernet, ETHERNET_INPUT) \
-_(ip4, IP4_INPUT) \
-_(ip6, IP6_INPUT) \
-_(li, LI)
+#define foreach_l2_input_next \
+ _ (drop, DROP) \
+ _ (ethernet, ETHERNET_INPUT) \
+ _ (ip4, IP4_INPUT) \
+ _ (ip6, IP6_INPUT)
uword
unformat_l2_input_next_index (unformat_input_t * input, va_list * args)
@@ -1618,7 +1635,6 @@ classify_table_command_fn (vlib_main_t * vm,
return 0;
}
-/* *INDENT-OFF* */
VLIB_CLI_COMMAND (classify_table, static) =
{
.path = "classify table",
@@ -1630,7 +1646,6 @@ VLIB_CLI_COMMAND (classify_table, static) =
"\n [del] [del-chain]",
.function = classify_table_command_fn,
};
-/* *INDENT-ON* */
static int
filter_table_mask_compare (void *a1, void *a2)
@@ -1649,13 +1664,13 @@ filter_table_mask_compare (void *a1, void *a2)
m1 = (u8 *) (t1->mask);
m2 = (u8 *) (t2->mask);
- for (i = 0; i < vec_len (t1->mask) * sizeof (u32x4); i++)
+ for (i = 0; i < t1->match_n_vectors * sizeof (u32x4); i++)
{
n1 += count_set_bits (m1[0]);
m1++;
}
- for (i = 0; i < vec_len (t2->mask) * sizeof (u32x4); i++)
+ for (i = 0; i < t2->match_n_vectors * sizeof (u32x4); i++)
{
n2 += count_set_bits (m2[0]);
m2++;
@@ -1815,11 +1830,11 @@ classify_lookup_chain (u32 table_index, u8 * mask, u32 n_skip, u32 n_match)
continue;
/* Masks aren't congruent, can't use this table. */
- if (vec_len (t->mask) * sizeof (u32x4) != vec_len (mask))
+ if (t->match_n_vectors * sizeof (u32x4) != vec_len (mask))
continue;
/* Masks aren't bit-for-bit identical, can't use this table. */
- if (memcmp (t->mask, mask, vec_len (mask)))
+ if (memcmp (t->mask, mask, t->match_n_vectors * sizeof (u32x4)))
continue;
/* Winner... */
@@ -2034,7 +2049,7 @@ vlib_enable_disable_pkt_trace_filter (int enable)
/*?
* Construct an arbitrary set of packet classifier tables for use with
- * "pcap rx | tx trace," and with the vpp packet tracer
+ * "pcap trace rx | tx," and with the vpp packet tracer
*
* Packets which match a rule in the classifier table chain
* will be traced. The tables are automatically ordered so that
@@ -2043,7 +2058,7 @@ vlib_enable_disable_pkt_trace_filter (int enable)
* It's reasonably likely that folks will configure a single
* table with one or two matches. As a result, we configure
* 8 hash buckets and 128K of match rule space. One can override
- * the defaults by specifiying "buckets <nnn>" and "memory-size <xxx>"
+ * the defaults by specifying "buckets <nnn>" and "memory-size <xxx>"
* as desired.
*
* To build up complex filter chains, repeatedly issue the
@@ -2077,18 +2092,20 @@ vlib_enable_disable_pkt_trace_filter (int enable)
* @cliexpar
* Configuring the classify filter
*
- * Configure a simple classify filter, and configure pcap rx trace to use it:
+ * Configure a simple classify filter, and configure pcap trace rx to use it:
*
- * <b><em>classify filter rx mask l3 ip4 src match l3 ip4 src 192.168.1.11"</em></b><br>
- * <b><em>pcap rx trace on max 100 filter</em></b>
+ * @cliexcmd{classify filter rx mask l3 ip4 src match l3 ip4 src 192.168.1.11}
+ * <b><em>pcap trace rx max 100 filter</em></b>
*
* Configure another fairly simple filter
*
- * <b><em>classify filter mask l3 ip4 src dst match l3 ip4 src 192.168.1.10 dst 192.168.2.10"</em></b>
+ * @cliexcmd{classify filter mask l3 ip4 src dst match l3 ip4 src 192.168.1.10
+ * dst 192.168.2.10}
*
*
* Configure a filter for use with the vpp packet tracer:
- * <b><em>classify filter trace mask l3 ip4 src dst match l3 ip4 src 192.168.1.10 dst 192.168.2.10"</em></b>
+ * @cliexcmd{classify filter trace mask l3 ip4 src dst match l3 ip4 src
+ * 192.168.1.10 dst 192.168.2.10}
* <b><em>trace add dpdk-input 100 filter</em></b>
*
* Clear classifier filters
@@ -2096,7 +2113,7 @@ vlib_enable_disable_pkt_trace_filter (int enable)
* <b><em>classify filter [trace | rx | tx | <intfc>] del</em></b>
*
* To display the top-level classifier tables for each use case:
- * <b><em>show classify filter</em/></b>
+ * <b><em>show classify filter</em></b>
*
* To inspect the classifier tables, use
*
@@ -2104,7 +2121,6 @@ vlib_enable_disable_pkt_trace_filter (int enable)
* The verbose form displays all of the match rules, with hit-counters
* @cliexend
?*/
-/* *INDENT-OFF* */
VLIB_CLI_COMMAND (classify_filter, static) =
{
.path = "classify filter",
@@ -2114,7 +2130,6 @@ VLIB_CLI_COMMAND (classify_filter, static) =
" [buckets <nn>] [memory-size <n>]",
.function = classify_filter_command_fn,
};
-/* *INDENT-ON* */
static clib_error_t *
show_classify_filter_command_fn (vlib_main_t * vm,
@@ -2194,14 +2209,12 @@ show_classify_filter_command_fn (vlib_main_t * vm,
}
-/* *INDENT-OFF* */
VLIB_CLI_COMMAND (show_classify_filter, static) =
{
.path = "show classify filter",
.short_help = "show classify filter [verbose [nn]]",
.function = show_classify_filter_command_fn,
};
-/* *INDENT-ON* */
u8 *
format_vnet_classify_table (u8 *s, va_list *args)
@@ -2213,7 +2226,7 @@ format_vnet_classify_table (u8 *s, va_list *args)
if (index == ~0)
{
- s = format (s, "%10s%10s%10s%10s", "TableIdx", "Sessions", "NextTbl",
+ s = format (s, "\n%10s%10s%10s%10s", "TableIdx", "Sessions", "NextTbl",
"NextNode", verbose ? "Details" : "");
return s;
}
@@ -2264,21 +2277,21 @@ show_classify_tables_command_fn (vlib_main_t * vm,
break;
}
- /* *INDENT-OFF* */
pool_foreach (t, cm->tables)
{
if (match_index == ~0 || (match_index == t - cm->tables))
vec_add1 (indices, t - cm->tables);
}
- /* *INDENT-ON* */
if (vec_len (indices))
{
- vlib_cli_output (vm, "%U", format_vnet_classify_table, cm, verbose,
- ~0 /* hdr */ );
for (i = 0; i < vec_len (indices); i++)
- vlib_cli_output (vm, "%U", format_vnet_classify_table, cm,
- verbose, indices[i]);
+ {
+ vlib_cli_output (vm, "%U", format_vnet_classify_table, cm, verbose,
+ ~0 /* hdr */);
+ vlib_cli_output (vm, "%U", format_vnet_classify_table, cm, verbose,
+ indices[i]);
+ }
}
else
vlib_cli_output (vm, "No classifier tables configured");
@@ -2288,13 +2301,11 @@ show_classify_tables_command_fn (vlib_main_t * vm,
return 0;
}
-/* *INDENT-OFF* */
VLIB_CLI_COMMAND (show_classify_table_command, static) = {
.path = "show classify tables",
.short_help = "show classify tables [index <nn>]",
.function = show_classify_tables_command_fn,
};
-/* *INDENT-ON* */
uword
unformat_l4_match (unformat_input_t * input, va_list * args)
@@ -2314,7 +2325,7 @@ unformat_l4_match (unformat_input_t * input, va_list * args)
else if (unformat (input, "dst_port %d", &dst_port))
;
else
- return 0;
+ break;
}
h.src_port = clib_host_to_net_u16 (src_port);
@@ -2675,6 +2686,7 @@ unformat_classify_match (unformat_input_t * input, va_list * args)
u8 *l2 = 0;
u8 *l3 = 0;
u8 *l4 = 0;
+ u8 add_l2 = 1;
if (pool_is_free_index (cm->tables, table_index))
return 0;
@@ -2685,6 +2697,9 @@ unformat_classify_match (unformat_input_t * input, va_list * args)
{
if (unformat (input, "hex %U", unformat_hex_string, &match))
;
+ else if (unformat (input, "l2 none"))
+ /* Don't add the l2 header in the match */
+ add_l2 = 0;
else if (unformat (input, "l2 %U", unformat_l2_match, &l2))
;
else if (unformat (input, "l3 %U", unformat_l3_match, &l3))
@@ -2695,6 +2710,15 @@ unformat_classify_match (unformat_input_t * input, va_list * args)
break;
}
+ if (l2 && !add_l2)
+ {
+ vec_free (match);
+ vec_free (l2);
+ vec_free (l3);
+ vec_free (l4);
+ return 0;
+ }
+
if (l4 && !l3)
{
vec_free (match);
@@ -2707,15 +2731,20 @@ unformat_classify_match (unformat_input_t * input, va_list * args)
{
if (l2 || l3 || l4)
{
- /* "Win a free Ethernet header in every packet" */
- if (l2 == 0)
- vec_validate_aligned (l2, 13, sizeof (u32x4));
- match = l2;
- if (l3)
+ if (add_l2)
{
- vec_append_aligned (match, l3, sizeof (u32x4));
- vec_free (l3);
+ /* "Win a free Ethernet header in every packet" */
+ if (l2 == 0)
+ vec_validate_aligned (l2, 13, sizeof (u32x4));
+ match = l2;
+ if (l3)
+ {
+ vec_append_aligned (match, l3, sizeof (u32x4));
+ vec_free (l3);
+ }
}
+ else
+ match = l3;
if (l4)
{
vec_append_aligned (match, l4, sizeof (u32x4));
@@ -2730,8 +2759,8 @@ unformat_classify_match (unformat_input_t * input, va_list * args)
sizeof (u32x4));
/* Set size, include skipped vectors */
- _vec_len (match) =
- (t->match_n_vectors + t->skip_n_vectors) * sizeof (u32x4);
+ vec_set_len (match,
+ (t->match_n_vectors + t->skip_n_vectors) * sizeof (u32x4));
*matchp = match;
@@ -2743,9 +2772,9 @@ unformat_classify_match (unformat_input_t * input, va_list * args)
int
vnet_classify_add_del_session (vnet_classify_main_t *cm, u32 table_index,
- const u8 *match, u32 hit_next_index,
+ const u8 *match, u16 hit_next_index,
u32 opaque_index, i32 advance, u8 action,
- u16 metadata, int is_add)
+ u32 metadata, int is_add)
{
vnet_classify_table_t *t;
vnet_classify_entry_5_t _max_e __attribute__ ((aligned (16)));
@@ -2889,7 +2918,6 @@ classify_session_command_fn (vlib_main_t * vm,
return 0;
}
-/* *INDENT-OFF* */
VLIB_CLI_COMMAND (classify_session_command, static) = {
.path = "classify session",
.short_help =
@@ -2899,7 +2927,6 @@ VLIB_CLI_COMMAND (classify_session_command, static) = {
"\n [action set-ip4-fib-id|set-ip6-fib-id|set-sr-policy-index <n>] [del]",
.function = classify_session_command_fn,
};
-/* *INDENT-ON* */
static uword
unformat_opaque_sw_if_index (unformat_input_t * input, va_list * args)
@@ -3043,7 +3070,12 @@ vnet_is_packet_traced (vlib_buffer_t * b, u32 classify_table_index, int func)
{
return vnet_is_packet_traced_inline (b, classify_table_index, func);
}
-
+VLIB_REGISTER_TRACE_FILTER_FUNCTION (vnet_is_packet_traced_fn, static) = {
+ .name = "vnet_is_packet_traced",
+ .description = "classifier based filter",
+ .priority = 50,
+ .function = vnet_is_packet_traced
+};
#define TEST_CODE 0
@@ -3195,7 +3227,7 @@ test_classify_churn (test_classify_main_t * tm)
for (i = 0; i < tm->sessions; i++)
{
u8 *key_minus_skip;
- u64 hash;
+ u32 hash;
vnet_classify_entry_t *e;
ep = tm->entries + i;
@@ -3312,7 +3344,6 @@ test_classify_command_fn (vlib_main_t * vm,
return error;
}
-/* *INDENT-OFF* */
VLIB_CLI_COMMAND (test_classify_command, static) = {
.path = "test classify",
.short_help =
@@ -3321,7 +3352,6 @@ VLIB_CLI_COMMAND (test_classify_command, static) = {
" [churn-test]",
.function = test_classify_command_fn,
};
-/* *INDENT-ON* */
#endif /* TEST_CODE */
/*
diff --git a/src/vnet/classify/vnet_classify.h b/src/vnet/classify/vnet_classify.h
index 06784e0541e..768593c45af 100644
--- a/src/vnet/classify/vnet_classify.h
+++ b/src/vnet/classify/vnet_classify.h
@@ -89,15 +89,17 @@ typedef struct _vnet_classify_entry
/* last heard time */
f64 last_heard;
+ u32 metadata;
+
+ /* Graph node next index */
+ u16 next_index;
+
+ vnet_classify_action_t action;
+
/* Really only need 1 bit */
u8 flags;
#define VNET_CLASSIFY_ENTRY_FREE (1<<0)
- vnet_classify_action_t action;
- u16 metadata;
- /* Graph node next index */
- u32 next_index;
-
/* Must be aligned to a 16-octet boundary */
u32x4 key[0];
} vnet_classify_entry_t;
@@ -147,9 +149,6 @@ typedef struct
typedef struct
{
CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
- /* Mask to apply after skipping N vectors */
- u32x4 *mask;
-
/* hash Buckets */
vnet_classify_bucket_t *buckets;
@@ -165,6 +164,7 @@ typedef struct
u32 entries_per_page;
u32 skip_n_vectors;
u32 match_n_vectors;
+ u16 load_mask;
/* Index of next table to try */
u32 next_table_index;
@@ -196,6 +196,14 @@ typedef struct
/* Writer (only) lock for this table */
clib_spinlock_t writer_lock;
+ CLIB_CACHE_LINE_ALIGN_MARK (cacheline2);
+ /* Mask to apply after skipping N vectors */
+ union
+ {
+ u32x4 mask[8];
+ u32 mask_u32[32];
+ };
+
} vnet_classify_table_t;
/**
@@ -233,10 +241,11 @@ struct _vnet_classify_main
extern vnet_classify_main_t vnet_classify_main;
+u8 *format_classify_entry (u8 *s, va_list *args);
u8 *format_classify_table (u8 * s, va_list * args);
u8 *format_vnet_classify_table (u8 *s, va_list *args);
-u64 vnet_classify_hash_packet (vnet_classify_table_t * t, u8 * h);
+u32 vnet_classify_hash_packet (const vnet_classify_table_t *t, u8 *h);
static_always_inline vnet_classify_table_t *
vnet_classify_table_get (u32 table_index)
@@ -246,63 +255,85 @@ vnet_classify_table_get (u32 table_index)
return (pool_elt_at_index (vcm->tables, table_index));
}
-static inline u64
-vnet_classify_hash_packet_inline (vnet_classify_table_t *t, const u8 *h)
+static inline u32
+vnet_classify_hash_packet_inline (const vnet_classify_table_t *t, const u8 *h)
{
- u32x4 *mask;
+ u64 xor_sum;
+ ASSERT (t);
+ h += t->skip_n_vectors * 16;
- union
- {
- u32x4 as_u32x4;
- u64 as_u64[2];
- } xor_sum __attribute__ ((aligned (sizeof (u32x4))));
+#if defined(CLIB_HAVE_VEC512) && defined(CLIB_HAVE_VEC512_MASK_LOAD_STORE)
+ u64x8 xor_sum_x8, *mask = (u64x8 *) t->mask;
+ u16 load_mask = t->load_mask;
+ u64x8u *data = (u64x8u *) h;
+
+ xor_sum_x8 = u64x8_mask_load_zero (data, load_mask) & mask[0];
+
+ if (PREDICT_FALSE (load_mask >> 8))
+ xor_sum_x8 ^= u64x8_mask_load_zero (data + 1, load_mask >> 8) & mask[1];
+
+ xor_sum_x8 ^= u64x8_align_right (xor_sum_x8, xor_sum_x8, 4);
+ xor_sum_x8 ^= u64x8_align_right (xor_sum_x8, xor_sum_x8, 2);
+ xor_sum = xor_sum_x8[0] ^ xor_sum_x8[1];
+#elif defined(CLIB_HAVE_VEC256) && defined(CLIB_HAVE_VEC256_MASK_LOAD_STORE)
+ u64x4 xor_sum_x4, *mask = (u64x4 *) t->mask;
+ u16 load_mask = t->load_mask;
+ u64x4u *data = (u64x4u *) h;
+
+ xor_sum_x4 = u64x4_mask_load_zero (data, load_mask) & mask[0];
+ xor_sum_x4 ^= u64x4_mask_load_zero (data + 1, load_mask >> 4) & mask[1];
+
+ if (PREDICT_FALSE (load_mask >> 8))
+ xor_sum_x4 ^= u64x4_mask_load_zero (data + 2, load_mask >> 8) & mask[2];
+
+ xor_sum_x4 ^= u64x4_align_right (xor_sum_x4, xor_sum_x4, 2);
+ xor_sum = xor_sum_x4[0] ^ xor_sum_x4[1];
+#elif defined(CLIB_HAVE_VEC128)
+ u64x2 *mask = (u64x2 *) t->mask;
+ u64x2u *data = (u64x2u *) h;
+ u64x2 xor_sum_x2;
+
+ xor_sum_x2 = data[0] & mask[0];
- ASSERT (t);
- mask = t->mask;
-#ifdef CLIB_HAVE_VEC128
- u32x4u *data = (u32x4u *) h;
- xor_sum.as_u32x4 = data[0 + t->skip_n_vectors] & mask[0];
switch (t->match_n_vectors)
{
case 5:
- xor_sum.as_u32x4 ^= data[4 + t->skip_n_vectors] & mask[4];
+ xor_sum_x2 ^= data[4] & mask[4];
/* FALLTHROUGH */
case 4:
- xor_sum.as_u32x4 ^= data[3 + t->skip_n_vectors] & mask[3];
+ xor_sum_x2 ^= data[3] & mask[3];
/* FALLTHROUGH */
case 3:
- xor_sum.as_u32x4 ^= data[2 + t->skip_n_vectors] & mask[2];
+ xor_sum_x2 ^= data[2] & mask[2];
/* FALLTHROUGH */
case 2:
- xor_sum.as_u32x4 ^= data[1 + t->skip_n_vectors] & mask[1];
+ xor_sum_x2 ^= data[1] & mask[1];
/* FALLTHROUGH */
case 1:
break;
default:
abort ();
}
+ xor_sum = xor_sum_x2[0] ^ xor_sum_x2[1];
#else
- u32 skip_u64 = t->skip_n_vectors * 2;
- u64 *data64 = (u64 *) h;
- xor_sum.as_u64[0] = data64[0 + skip_u64] & ((u64 *) mask)[0];
- xor_sum.as_u64[1] = data64[1 + skip_u64] & ((u64 *) mask)[1];
+ u64 *data = (u64 *) h;
+ u64 *mask = (u64 *) t->mask;
+
+ xor_sum = (data[0] & mask[0]) ^ (data[1] & mask[1]);
+
switch (t->match_n_vectors)
{
case 5:
- xor_sum.as_u64[0] ^= data64[8 + skip_u64] & ((u64 *) mask)[8];
- xor_sum.as_u64[1] ^= data64[9 + skip_u64] & ((u64 *) mask)[9];
+ xor_sum ^= (data[8] & mask[8]) ^ (data[9] & mask[9]);
/* FALLTHROUGH */
case 4:
- xor_sum.as_u64[0] ^= data64[6 + skip_u64] & ((u64 *) mask)[6];
- xor_sum.as_u64[1] ^= data64[7 + skip_u64] & ((u64 *) mask)[7];
+ xor_sum ^= (data[6] & mask[6]) ^ (data[7] & mask[7]);
/* FALLTHROUGH */
case 3:
- xor_sum.as_u64[0] ^= data64[4 + skip_u64] & ((u64 *) mask)[4];
- xor_sum.as_u64[1] ^= data64[5 + skip_u64] & ((u64 *) mask)[5];
+ xor_sum ^= (data[4] & mask[4]) ^ (data[5] & mask[5]);
/* FALLTHROUGH */
case 2:
- xor_sum.as_u64[0] ^= data64[2 + skip_u64] & ((u64 *) mask)[2];
- xor_sum.as_u64[1] ^= data64[3 + skip_u64] & ((u64 *) mask)[3];
+ xor_sum ^= (data[2] & mask[2]) ^ (data[3] & mask[3]);
/* FALLTHROUGH */
case 1:
break;
@@ -315,7 +346,7 @@ vnet_classify_hash_packet_inline (vnet_classify_table_t *t, const u8 *h)
#ifdef clib_crc32c_uses_intrinsics
return clib_crc32c ((u8 *) & xor_sum, sizeof (xor_sum));
#else
- return clib_xxhash (xor_sum.as_u64[0] ^ xor_sum.as_u64[1]);
+ return clib_xxhash (xor_sum);
#endif
}
@@ -332,7 +363,7 @@ vnet_classify_prefetch_bucket (vnet_classify_table_t * t, u64 hash)
}
static inline vnet_classify_entry_t *
-vnet_classify_get_entry (vnet_classify_table_t * t, uword offset)
+vnet_classify_get_entry (const vnet_classify_table_t *t, uword offset)
{
u8 *hp = clib_mem_get_heap_base (t->mheap);
u8 *vp = hp + offset;
@@ -354,8 +385,8 @@ vnet_classify_get_offset (vnet_classify_table_t * t,
}
static inline vnet_classify_entry_t *
-vnet_classify_entry_at_index (vnet_classify_table_t * t,
- vnet_classify_entry_t * e, u32 index)
+vnet_classify_entry_at_index (const vnet_classify_table_t *t,
+ vnet_classify_entry_t *e, u32 index)
{
u8 *eu8;
@@ -392,127 +423,151 @@ vnet_classify_prefetch_entry (vnet_classify_table_t * t, u64 hash)
clib_prefetch_load (e);
}
-vnet_classify_entry_t *vnet_classify_find_entry (vnet_classify_table_t * t,
- u8 * h, u64 hash, f64 now);
+vnet_classify_entry_t *
+vnet_classify_find_entry (const vnet_classify_table_t *t, u8 *h, u32 hash,
+ f64 now);
+
+static_always_inline int
+vnet_classify_entry_is_equal (vnet_classify_entry_t *v, const u8 *d, u8 *m,
+ u32 match_n_vectors, u16 load_mask)
+{
+#if defined(CLIB_HAVE_VEC512) && defined(CLIB_HAVE_VEC512_MASK_LOAD_STORE)
+ u64x8 r, *mask = (u64x8 *) m;
+ u64x8u *data = (u64x8u *) d;
+ u64x8 *key = (u64x8 *) v->key;
+
+ r = (u64x8_mask_load_zero (data, load_mask) & mask[0]) ^
+ u64x8_mask_load_zero (key, load_mask);
+ load_mask >>= 8;
+
+ if (PREDICT_FALSE (load_mask))
+ r |= (u64x8_mask_load_zero (data + 1, load_mask) & mask[1]) ^
+ u64x8_mask_load_zero (key + 1, load_mask);
+
+ if (u64x8_is_all_zero (r))
+ return 1;
+
+#elif defined(CLIB_HAVE_VEC256) && defined(CLIB_HAVE_VEC256_MASK_LOAD_STORE)
+ u64x4 r, *mask = (u64x4 *) m;
+ u64x4u *data = (u64x4u *) d;
+ u64x4 *key = (u64x4 *) v->key;
+
+ r = (u64x4_mask_load_zero (data, load_mask) & mask[0]) ^
+ u64x4_mask_load_zero (key, load_mask);
+ load_mask >>= 4;
+
+ r |= (u64x4_mask_load_zero (data + 1, load_mask) & mask[1]) ^
+ u64x4_mask_load_zero (key + 1, load_mask);
+ load_mask >>= 4;
+
+ if (PREDICT_FALSE (load_mask))
+ r |= (u64x4_mask_load_zero (data + 2, load_mask) & mask[2]) ^
+ u64x4_mask_load_zero (key + 2, load_mask);
+
+ if (u64x4_is_all_zero (r))
+ return 1;
+
+#elif defined(CLIB_HAVE_VEC128)
+ u64x2u *data = (u64x2u *) d;
+ u64x2 *key = (u64x2 *) v->key;
+ u64x2 *mask = (u64x2 *) m;
+ u64x2 r;
+
+ r = (data[0] & mask[0]) ^ key[0];
+ switch (match_n_vectors)
+ {
+ case 5:
+ r |= (data[4] & mask[4]) ^ key[4];
+ /* fall through */
+ case 4:
+ r |= (data[3] & mask[3]) ^ key[3];
+ /* fall through */
+ case 3:
+ r |= (data[2] & mask[2]) ^ key[2];
+ /* fall through */
+ case 2:
+ r |= (data[1] & mask[1]) ^ key[1];
+ /* fall through */
+ case 1:
+ break;
+ default:
+ abort ();
+ }
+
+ if (u64x2_is_all_zero (r))
+ return 1;
+
+#else
+ u64 *data = (u64 *) d;
+ u64 *key = (u64 *) v->key;
+ u64 *mask = (u64 *) m;
+ u64 r;
+
+ r = ((data[0] & mask[0]) ^ key[0]) | ((data[1] & mask[1]) ^ key[1]);
+ switch (match_n_vectors)
+ {
+ case 5:
+ r |= ((data[8] & mask[8]) ^ key[8]) | ((data[9] & mask[9]) ^ key[9]);
+ /* fall through */
+ case 4:
+ r |= ((data[6] & mask[6]) ^ key[6]) | ((data[7] & mask[7]) ^ key[7]);
+ /* fall through */
+ case 3:
+ r |= ((data[4] & mask[4]) ^ key[4]) | ((data[5] & mask[5]) ^ key[5]);
+ /* fall through */
+ case 2:
+ r |= ((data[2] & mask[2]) ^ key[2]) | ((data[3] & mask[3]) ^ key[3]);
+ /* fall through */
+ case 1:
+ break;
+ default:
+ abort ();
+ }
+
+ if (r == 0)
+ return 1;
+
+#endif /* CLIB_HAVE_VEC128 */
+ return 0;
+}
static inline vnet_classify_entry_t *
-vnet_classify_find_entry_inline (vnet_classify_table_t *t, const u8 *h,
- u64 hash, f64 now)
+vnet_classify_find_entry_inline (const vnet_classify_table_t *t, const u8 *h,
+ u32 hash, f64 now)
{
vnet_classify_entry_t *v;
- u32x4 *mask, *key;
- union
- {
- u32x4 as_u32x4;
- u64 as_u64[2];
- } result __attribute__ ((aligned (sizeof (u32x4))));
vnet_classify_bucket_t *b;
- u32 value_index;
- u32 bucket_index;
- u32 limit;
+ u32 bucket_index, limit, pages, match_n_vectors = t->match_n_vectors;
+ u16 load_mask = t->load_mask;
+ u8 *mask = (u8 *) t->mask;
int i;
bucket_index = hash & (t->nbuckets - 1);
b = &t->buckets[bucket_index];
- mask = t->mask;
if (b->offset == 0)
return 0;
- hash >>= t->log2_nbuckets;
-
+ pages = 1 << b->log2_pages;
v = vnet_classify_get_entry (t, b->offset);
- value_index = hash & ((1 << b->log2_pages) - 1);
limit = t->entries_per_page;
if (PREDICT_FALSE (b->linear_search))
{
- value_index = 0;
- limit *= (1 << b->log2_pages);
+ limit *= pages;
+ v = vnet_classify_entry_at_index (t, v, 0);
}
-
- v = vnet_classify_entry_at_index (t, v, value_index);
-
-#ifdef CLIB_HAVE_VEC128
- const u32x4u *data = (const u32x4u *) h;
- for (i = 0; i < limit; i++)
+ else
{
- key = v->key;
- result.as_u32x4 = (data[0 + t->skip_n_vectors] & mask[0]) ^ key[0];
- switch (t->match_n_vectors)
- {
- case 5:
- result.as_u32x4 |= (data[4 + t->skip_n_vectors] & mask[4]) ^ key[4];
- /* FALLTHROUGH */
- case 4:
- result.as_u32x4 |= (data[3 + t->skip_n_vectors] & mask[3]) ^ key[3];
- /* FALLTHROUGH */
- case 3:
- result.as_u32x4 |= (data[2 + t->skip_n_vectors] & mask[2]) ^ key[2];
- /* FALLTHROUGH */
- case 2:
- result.as_u32x4 |= (data[1 + t->skip_n_vectors] & mask[1]) ^ key[1];
- /* FALLTHROUGH */
- case 1:
- break;
- default:
- abort ();
- }
-
- if (u32x4_is_all_zero (result.as_u32x4))
- {
- if (PREDICT_TRUE (now))
- {
- v->hits++;
- v->last_heard = now;
- }
- return (v);
- }
- v = vnet_classify_entry_at_index (t, v, 1);
+ hash >>= t->log2_nbuckets;
+ v = vnet_classify_entry_at_index (t, v, hash & (pages - 1));
}
-#else
- u32 skip_u64 = t->skip_n_vectors * 2;
- const u64 *data64 = (const u64 *) h;
- for (i = 0; i < limit; i++)
- {
- key = v->key;
- result.as_u64[0] =
- (data64[0 + skip_u64] & ((u64 *) mask)[0]) ^ ((u64 *) key)[0];
- result.as_u64[1] =
- (data64[1 + skip_u64] & ((u64 *) mask)[1]) ^ ((u64 *) key)[1];
- switch (t->match_n_vectors)
- {
- case 5:
- result.as_u64[0] |=
- (data64[8 + skip_u64] & ((u64 *) mask)[8]) ^ ((u64 *) key)[8];
- result.as_u64[1] |=
- (data64[9 + skip_u64] & ((u64 *) mask)[9]) ^ ((u64 *) key)[9];
- /* FALLTHROUGH */
- case 4:
- result.as_u64[0] |=
- (data64[6 + skip_u64] & ((u64 *) mask)[6]) ^ ((u64 *) key)[6];
- result.as_u64[1] |=
- (data64[7 + skip_u64] & ((u64 *) mask)[7]) ^ ((u64 *) key)[7];
- /* FALLTHROUGH */
- case 3:
- result.as_u64[0] |=
- (data64[4 + skip_u64] & ((u64 *) mask)[4]) ^ ((u64 *) key)[4];
- result.as_u64[1] |=
- (data64[5 + skip_u64] & ((u64 *) mask)[5]) ^ ((u64 *) key)[5];
- /* FALLTHROUGH */
- case 2:
- result.as_u64[0] |=
- (data64[2 + skip_u64] & ((u64 *) mask)[2]) ^ ((u64 *) key)[2];
- result.as_u64[1] |=
- (data64[3 + skip_u64] & ((u64 *) mask)[3]) ^ ((u64 *) key)[3];
- /* FALLTHROUGH */
- case 1:
- break;
- default:
- abort ();
- }
+ h += t->skip_n_vectors * 16;
- if (result.as_u64[0] == 0 && result.as_u64[1] == 0)
+ for (i = 0; i < limit; i++)
+ {
+ if (vnet_classify_entry_is_equal (v, h, mask, match_n_vectors,
+ load_mask))
{
if (PREDICT_TRUE (now))
{
@@ -521,10 +576,8 @@ vnet_classify_find_entry_inline (vnet_classify_table_t *t, const u8 *h,
}
return (v);
}
-
v = vnet_classify_entry_at_index (t, v, 1);
}
-#endif /* CLIB_HAVE_VEC128 */
return 0;
}
@@ -535,9 +588,9 @@ vnet_classify_table_t *vnet_classify_new_table (vnet_classify_main_t *cm,
u32 match_n_vectors);
int vnet_classify_add_del_session (vnet_classify_main_t *cm, u32 table_index,
- const u8 *match, u32 hit_next_index,
+ const u8 *match, u16 hit_next_index,
u32 opaque_index, i32 advance, u8 action,
- u16 metadata, int is_add);
+ u32 metadata, int is_add);
int vnet_classify_add_del_table (vnet_classify_main_t *cm, const u8 *mask,
u32 nbuckets, u32 memory_size, u32 skip,