about summary refs log tree commit diff stats
path: root/src/plugins/acl/hash_lookup.c
diff options
context:
space:
mode:
Diffstat (limited to 'src/plugins/acl/hash_lookup.c')
-rw-r--r--  src/plugins/acl/hash_lookup.c  451
1 files changed, 230 insertions, 221 deletions
diff --git a/src/plugins/acl/hash_lookup.c b/src/plugins/acl/hash_lookup.c
index 2262402d52f..ad55054c3e3 100644
--- a/src/plugins/acl/hash_lookup.c
+++ b/src/plugins/acl/hash_lookup.c
@@ -33,126 +33,17 @@
#include "hash_lookup_private.h"
-static inline applied_hash_ace_entry_t **get_applied_hash_aces(acl_main_t *am, int is_input, u32 sw_if_index)
+always_inline applied_hash_ace_entry_t **get_applied_hash_aces(acl_main_t *am, u32 lc_index)
{
- applied_hash_ace_entry_t **applied_hash_aces = is_input ? vec_elt_at_index(am->input_hash_entry_vec_by_sw_if_index, sw_if_index)
+ applied_hash_ace_entry_t **applied_hash_aces = vec_elt_at_index(am->hash_entry_vec_by_lc_index, lc_index);
+
+/*is_input ? vec_elt_at_index(am->input_hash_entry_vec_by_sw_if_index, sw_if_index)
: vec_elt_at_index(am->output_hash_entry_vec_by_sw_if_index, sw_if_index);
+*/
return applied_hash_aces;
}
-
-/*
- * This returns true if there is indeed a match on the portranges.
- * With all these levels of indirections, this is not going to be very fast,
- * so, best use the individual ports or wildcard ports for performance.
- */
-static int
-match_portranges(acl_main_t *am, fa_5tuple_t *match, u32 index)
-{
-
- applied_hash_ace_entry_t **applied_hash_aces = get_applied_hash_aces(am, match->pkt.is_input, match->pkt.sw_if_index);
- applied_hash_ace_entry_t *pae = vec_elt_at_index((*applied_hash_aces), index);
-
- acl_rule_t *r = &(am->acls[pae->acl_index].rules[pae->ace_index]);
- DBG("PORTMATCH: %d <= %d <= %d && %d <= %d <= %d ?",
- r->src_port_or_type_first, match->l4.port[0], r->src_port_or_type_last,
- r->dst_port_or_code_first, match->l4.port[1], r->dst_port_or_code_last);
-
- return ( ((r->src_port_or_type_first <= match->l4.port[0]) && r->src_port_or_type_last >= match->l4.port[0]) &&
- ((r->dst_port_or_code_first <= match->l4.port[1]) && r->dst_port_or_code_last >= match->l4.port[1]) );
-}
-
-static u32
-multi_acl_match_get_applied_ace_index(acl_main_t *am, fa_5tuple_t *match)
-{
- clib_bihash_kv_48_8_t kv;
- clib_bihash_kv_48_8_t result;
- fa_5tuple_t *kv_key = (fa_5tuple_t *)kv.key;
- hash_acl_lookup_value_t *result_val = (hash_acl_lookup_value_t *)&result.value;
- u64 *pmatch = (u64 *)match;
- u64 *pmask;
- u64 *pkey;
- int mask_type_index;
- u32 curr_match_index = ~0;
-
- u32 sw_if_index = match->pkt.sw_if_index;
- u8 is_input = match->pkt.is_input;
- applied_hash_ace_entry_t **applied_hash_aces = get_applied_hash_aces(am, is_input, sw_if_index);
- applied_hash_acl_info_t **applied_hash_acls = is_input ? &am->input_applied_hash_acl_info_by_sw_if_index :
- &am->output_applied_hash_acl_info_by_sw_if_index;
-
- DBG("TRYING TO MATCH: %016llx %016llx %016llx %016llx %016llx %016llx",
- pmatch[0], pmatch[1], pmatch[2], pmatch[3], pmatch[4], pmatch[5]);
-
- for(mask_type_index=0; mask_type_index < pool_len(am->ace_mask_type_pool); mask_type_index++) {
- if (!clib_bitmap_get(vec_elt_at_index((*applied_hash_acls), sw_if_index)->mask_type_index_bitmap, mask_type_index)) {
- /* This bit is not set. Avoid trying to match */
- continue;
- }
- ace_mask_type_entry_t *mte = vec_elt_at_index(am->ace_mask_type_pool, mask_type_index);
- pmatch = (u64 *)match;
- pmask = (u64 *)&mte->mask;
- pkey = (u64 *)kv.key;
- /*
- * unrolling the below loop results in a noticeable performance increase.
- int i;
- for(i=0; i<6; i++) {
- kv.key[i] = pmatch[i] & pmask[i];
- }
- */
-
- *pkey++ = *pmatch++ & *pmask++;
- *pkey++ = *pmatch++ & *pmask++;
- *pkey++ = *pmatch++ & *pmask++;
- *pkey++ = *pmatch++ & *pmask++;
- *pkey++ = *pmatch++ & *pmask++;
- *pkey++ = *pmatch++ & *pmask++;
-
- kv_key->pkt.mask_type_index_lsb = mask_type_index;
- DBG(" KEY %3d: %016llx %016llx %016llx %016llx %016llx %016llx", mask_type_index,
- kv.key[0], kv.key[1], kv.key[2], kv.key[3], kv.key[4], kv.key[5]);
- int res = BV (clib_bihash_search) (&am->acl_lookup_hash, &kv, &result);
- if (res == 0) {
- DBG("ACL-MATCH! result_val: %016llx", result_val->as_u64);
- if (result_val->applied_entry_index < curr_match_index) {
- if (PREDICT_FALSE(result_val->need_portrange_check)) {
- /*
- * This is going to be slow, since we can have multiple superset
- * entries for narrow-ish portranges, e.g.:
- * 0..42 100..400, 230..60000,
- * so we need to walk linearly and check if they match.
- */
-
- u32 curr_index = result_val->applied_entry_index;
- while ((curr_index != ~0) && !match_portranges(am, match, curr_index)) {
- /* while no match and there are more entries, walk... */
- applied_hash_ace_entry_t *pae = vec_elt_at_index((*applied_hash_aces),curr_index);
- DBG("entry %d did not portmatch, advancing to %d", curr_index, pae->next_applied_entry_index);
- curr_index = pae->next_applied_entry_index;
- }
- if (curr_index < curr_match_index) {
- DBG("The index %d is the new candidate in portrange matches.", curr_index);
- curr_match_index = curr_index;
- } else {
- DBG("Curr portmatch index %d is too big vs. current matched one %d", curr_index, curr_match_index);
- }
- } else {
- /* The usual path is here. Found an entry in front of the current candiate - so it's a new one */
- DBG("This match is the new candidate");
- curr_match_index = result_val->applied_entry_index;
- if (!result_val->shadowed) {
- /* new result is known to not be shadowed, so no point to look up further */
- break;
- }
- }
- }
- }
- }
- DBG("MATCH-RESULT: %d", curr_match_index);
- return curr_match_index;
-}
-
static void
hashtable_add_del(acl_main_t *am, clib_bihash_kv_48_8_t *kv, int is_add)
{
@@ -165,7 +56,7 @@ hashtable_add_del(acl_main_t *am, clib_bihash_kv_48_8_t *kv, int is_add)
static void
fill_applied_hash_ace_kv(acl_main_t *am,
applied_hash_ace_entry_t **applied_hash_aces,
- u32 sw_if_index, u8 is_input,
+ u32 lc_index,
u32 new_index, clib_bihash_kv_48_8_t *kv)
{
fa_5tuple_t *kv_key = (fa_5tuple_t *)kv->key;
@@ -175,8 +66,7 @@ fill_applied_hash_ace_kv(acl_main_t *am,
memcpy(kv_key, &(vec_elt_at_index(ha->rules, pae->hash_ace_info_index)->match), sizeof(*kv_key));
/* initialize the sw_if_index and direction */
- kv_key->pkt.sw_if_index = sw_if_index;
- kv_key->pkt.is_input = is_input;
+ kv_key->pkt.lc_index = lc_index;
kv_val->as_u64 = 0;
kv_val->applied_entry_index = new_index;
kv_val->need_portrange_check = vec_elt_at_index(ha->rules, pae->hash_ace_info_index)->src_portrange_not_powerof2 ||
@@ -187,13 +77,13 @@ fill_applied_hash_ace_kv(acl_main_t *am,
static void
add_del_hashtable_entry(acl_main_t *am,
- u32 sw_if_index, u8 is_input,
+ u32 lc_index,
applied_hash_ace_entry_t **applied_hash_aces,
u32 index, int is_add)
{
clib_bihash_kv_48_8_t kv;
- fill_applied_hash_ace_kv(am, applied_hash_aces, sw_if_index, is_input, index, &kv);
+ fill_applied_hash_ace_kv(am, applied_hash_aces, lc_index, index, &kv);
hashtable_add_del(am, &kv, is_add);
}
@@ -201,16 +91,16 @@ add_del_hashtable_entry(acl_main_t *am,
static void
activate_applied_ace_hash_entry(acl_main_t *am,
- u32 sw_if_index, u8 is_input,
+ u32 lc_index,
applied_hash_ace_entry_t **applied_hash_aces,
u32 new_index)
{
clib_bihash_kv_48_8_t kv;
ASSERT(new_index != ~0);
applied_hash_ace_entry_t *pae = vec_elt_at_index((*applied_hash_aces), new_index);
- DBG("activate_applied_ace_hash_entry sw_if_index %d is_input %d new_index %d", sw_if_index, is_input, new_index);
+ DBG("activate_applied_ace_hash_entry lc_index %d new_index %d", lc_index, new_index);
- fill_applied_hash_ace_kv(am, applied_hash_aces, sw_if_index, is_input, new_index, &kv);
+ fill_applied_hash_ace_kv(am, applied_hash_aces, lc_index, new_index, &kv);
DBG("APPLY ADD KY: %016llx %016llx %016llx %016llx %016llx %016llx",
kv.key[0], kv.key[1], kv.key[2],
@@ -272,8 +162,9 @@ hash_acl_set_heap(acl_main_t *am)
}
void
-acl_plugin_hash_acl_set_validate_heap(acl_main_t *am, int on)
+acl_plugin_hash_acl_set_validate_heap(int on)
{
+ acl_main_t *am = &acl_main;
clib_mem_set_heap(hash_acl_set_heap(am));
mheap_t *h = mheap_header (am->hash_lookup_mheap);
if (on) {
@@ -287,8 +178,9 @@ acl_plugin_hash_acl_set_validate_heap(acl_main_t *am, int on)
}
void
-acl_plugin_hash_acl_set_trace_heap(acl_main_t *am, int on)
+acl_plugin_hash_acl_set_trace_heap(int on)
{
+ acl_main_t *am = &acl_main;
clib_mem_set_heap(hash_acl_set_heap(am));
mheap_t *h = mheap_header (am->hash_lookup_mheap);
if (on) {
@@ -299,11 +191,11 @@ acl_plugin_hash_acl_set_trace_heap(acl_main_t *am, int on)
}
void
-hash_acl_apply(acl_main_t *am, u32 sw_if_index, u8 is_input, int acl_index)
+hash_acl_apply(acl_main_t *am, u32 lc_index, int acl_index, u32 acl_position)
{
int i;
- DBG0("HASH ACL apply: sw_if_index %d is_input %d acl %d", sw_if_index, is_input, acl_index);
+ DBG0("HASH ACL apply: lc_index %d acl %d", lc_index, acl_index);
if (!am->acl_lookup_hash_initialized) {
BV (clib_bihash_init) (&am->acl_lookup_hash, "ACL plugin rule lookup bihash",
am->hash_lookup_hash_buckets, am->hash_lookup_hash_memory);
@@ -311,42 +203,36 @@ hash_acl_apply(acl_main_t *am, u32 sw_if_index, u8 is_input, int acl_index)
}
void *oldheap = hash_acl_set_heap(am);
- if (is_input) {
- vec_validate(am->input_hash_entry_vec_by_sw_if_index, sw_if_index);
- } else {
- vec_validate(am->output_hash_entry_vec_by_sw_if_index, sw_if_index);
- }
+ vec_validate(am->hash_entry_vec_by_lc_index, lc_index);
vec_validate(am->hash_acl_infos, acl_index);
- applied_hash_ace_entry_t **applied_hash_aces = get_applied_hash_aces(am, is_input, sw_if_index);
+ applied_hash_ace_entry_t **applied_hash_aces = get_applied_hash_aces(am, lc_index);
hash_acl_info_t *ha = vec_elt_at_index(am->hash_acl_infos, acl_index);
- u32 **hash_acl_applied_sw_if_index = is_input ? &ha->inbound_sw_if_index_list
- : &ha->outbound_sw_if_index_list;
+ u32 **hash_acl_applied_lc_index = &ha->lc_index_list;
int base_offset = vec_len(*applied_hash_aces);
/* Update the bitmap of the mask types with which the lookup
- needs to happen for the ACLs applied to this sw_if_index */
- applied_hash_acl_info_t **applied_hash_acls = is_input ? &am->input_applied_hash_acl_info_by_sw_if_index :
- &am->output_applied_hash_acl_info_by_sw_if_index;
- vec_validate((*applied_hash_acls), sw_if_index);
- applied_hash_acl_info_t *pal = vec_elt_at_index((*applied_hash_acls), sw_if_index);
+ needs to happen for the ACLs applied to this lc_index */
+ applied_hash_acl_info_t **applied_hash_acls = &am->applied_hash_acl_info_by_lc_index;
+ vec_validate((*applied_hash_acls), lc_index);
+ applied_hash_acl_info_t *pal = vec_elt_at_index((*applied_hash_acls), lc_index);
/* ensure the list of applied hash acls is initialized and add this acl# to it */
u32 index = vec_search(pal->applied_acls, acl_index);
if (index != ~0) {
- clib_warning("BUG: trying to apply twice acl_index %d on sw_if_index %d is_input %d",
- acl_index, sw_if_index, is_input);
+ clib_warning("BUG: trying to apply twice acl_index %d on lc_index %d, according to lc",
+ acl_index, lc_index);
goto done;
}
vec_add1(pal->applied_acls, acl_index);
- u32 index2 = vec_search((*hash_acl_applied_sw_if_index), sw_if_index);
+ u32 index2 = vec_search((*hash_acl_applied_lc_index), lc_index);
if (index2 != ~0) {
- clib_warning("BUG: trying to apply twice acl_index %d on (sw_if_index %d) is_input %d",
- acl_index, sw_if_index, is_input);
+ clib_warning("BUG: trying to apply twice acl_index %d on lc_index %d, according to hash h-acl info",
+ acl_index, lc_index);
goto done;
}
- vec_add1((*hash_acl_applied_sw_if_index), sw_if_index);
+ vec_add1((*hash_acl_applied_lc_index), lc_index);
pal->mask_type_index_bitmap = clib_bitmap_or(pal->mask_type_index_bitmap,
ha->mask_type_index_bitmap);
@@ -369,6 +255,7 @@ hash_acl_apply(acl_main_t *am, u32 sw_if_index, u8 is_input, int acl_index)
applied_hash_ace_entry_t *pae = vec_elt_at_index((*applied_hash_aces), new_index);
pae->acl_index = acl_index;
pae->ace_index = ha->rules[i].ace_index;
+ pae->acl_position = acl_position;
pae->action = ha->rules[i].action;
pae->hitcount = 0;
pae->hash_ace_info_index = i;
@@ -376,7 +263,7 @@ hash_acl_apply(acl_main_t *am, u32 sw_if_index, u8 is_input, int acl_index)
pae->next_applied_entry_index = ~0;
pae->prev_applied_entry_index = ~0;
pae->tail_applied_entry_index = ~0;
- activate_applied_ace_hash_entry(am, sw_if_index, is_input, applied_hash_aces, new_index);
+ activate_applied_ace_hash_entry(am, lc_index, applied_hash_aces, new_index);
}
applied_hash_entries_analyze(am, applied_hash_aces);
done:
@@ -403,7 +290,7 @@ find_head_applied_ace_index(applied_hash_ace_entry_t **applied_hash_aces, u32 cu
static void
move_applied_ace_hash_entry(acl_main_t *am,
- u32 sw_if_index, u8 is_input,
+ u32 lc_index,
applied_hash_ace_entry_t **applied_hash_aces,
u32 old_index, u32 new_index)
{
@@ -421,7 +308,7 @@ move_applied_ace_hash_entry(acl_main_t *am,
prev_pae->next_applied_entry_index = new_index;
} else {
/* first entry - so the hash points to it, update */
- add_del_hashtable_entry(am, sw_if_index, is_input,
+ add_del_hashtable_entry(am, lc_index,
applied_hash_aces, new_index, 1);
ASSERT(pae->tail_applied_entry_index != ~0);
}
@@ -448,12 +335,12 @@ move_applied_ace_hash_entry(acl_main_t *am,
static void
deactivate_applied_ace_hash_entry(acl_main_t *am,
- u32 sw_if_index, u8 is_input,
+ u32 lc_index,
applied_hash_ace_entry_t **applied_hash_aces,
u32 old_index)
{
applied_hash_ace_entry_t *pae = vec_elt_at_index((*applied_hash_aces), old_index);
- DBG("UNAPPLY DEACTIVATE: sw_if_index %d is_input %d, applied index %d", sw_if_index, is_input, old_index);
+ DBG("UNAPPLY DEACTIVATE: lc_index %d applied index %d", lc_index, old_index);
if (pae->prev_applied_entry_index != ~0) {
DBG("UNAPPLY = index %d has prev_applied_entry_index %d", old_index, pae->prev_applied_entry_index);
@@ -483,11 +370,11 @@ deactivate_applied_ace_hash_entry(acl_main_t *am,
DBG("Resetting the hash table entry from %d to %d, setting tail index to %d", old_index, pae->next_applied_entry_index, pae->tail_applied_entry_index);
/* unlink from the next element */
next_pae->prev_applied_entry_index = ~0;
- add_del_hashtable_entry(am, sw_if_index, is_input,
+ add_del_hashtable_entry(am, lc_index,
applied_hash_aces, pae->next_applied_entry_index, 1);
} else {
/* no next entry, so just delete the entry in the hash table */
- add_del_hashtable_entry(am, sw_if_index, is_input,
+ add_del_hashtable_entry(am, lc_index,
applied_hash_aces, old_index, 0);
}
}
@@ -499,13 +386,15 @@ deactivate_applied_ace_hash_entry(acl_main_t *am,
static void
-hash_acl_build_applied_lookup_bitmap(acl_main_t *am, u32 sw_if_index, u8 is_input)
+hash_acl_build_applied_lookup_bitmap(acl_main_t *am, u32 lc_index)
{
int i;
uword *new_lookup_bitmap = 0;
- applied_hash_acl_info_t **applied_hash_acls = is_input ? &am->input_applied_hash_acl_info_by_sw_if_index
- : &am->output_applied_hash_acl_info_by_sw_if_index;
- applied_hash_acl_info_t *pal = vec_elt_at_index((*applied_hash_acls), sw_if_index);
+
+ applied_hash_acl_info_t **applied_hash_acls = &am->applied_hash_acl_info_by_lc_index;
+ vec_validate((*applied_hash_acls), lc_index);
+ applied_hash_acl_info_t *pal = vec_elt_at_index((*applied_hash_acls), lc_index);
+
for(i=0; i < vec_len(pal->applied_acls); i++) {
u32 a_acl_index = *vec_elt_at_index((pal->applied_acls), i);
hash_acl_info_t *ha = vec_elt_at_index(am->hash_acl_infos, a_acl_index);
@@ -520,37 +409,35 @@ hash_acl_build_applied_lookup_bitmap(acl_main_t *am, u32 sw_if_index, u8 is_inpu
}
void
-hash_acl_unapply(acl_main_t *am, u32 sw_if_index, u8 is_input, int acl_index)
+hash_acl_unapply(acl_main_t *am, u32 lc_index, int acl_index)
{
int i;
- DBG0("HASH ACL unapply: sw_if_index %d is_input %d acl %d", sw_if_index, is_input, acl_index);
- applied_hash_acl_info_t **applied_hash_acls = is_input ? &am->input_applied_hash_acl_info_by_sw_if_index
- : &am->output_applied_hash_acl_info_by_sw_if_index;
- applied_hash_acl_info_t *pal = vec_elt_at_index((*applied_hash_acls), sw_if_index);
+ DBG0("HASH ACL unapply: lc_index %d acl %d", lc_index, acl_index);
+ applied_hash_acl_info_t **applied_hash_acls = &am->applied_hash_acl_info_by_lc_index;
+ applied_hash_acl_info_t *pal = vec_elt_at_index((*applied_hash_acls), lc_index);
hash_acl_info_t *ha = vec_elt_at_index(am->hash_acl_infos, acl_index);
- u32 **hash_acl_applied_sw_if_index = is_input ? &ha->inbound_sw_if_index_list
- : &ha->outbound_sw_if_index_list;
+ u32 **hash_acl_applied_lc_index = &ha->lc_index_list;
/* remove this acl# from the list of applied hash acls */
u32 index = vec_search(pal->applied_acls, acl_index);
if (index == ~0) {
- clib_warning("BUG: trying to unapply unapplied acl_index %d on sw_if_index %d is_input %d",
- acl_index, sw_if_index, is_input);
+ clib_warning("BUG: trying to unapply unapplied acl_index %d on lc_index %d, according to lc",
+ acl_index, lc_index);
return;
}
vec_del1(pal->applied_acls, index);
- u32 index2 = vec_search((*hash_acl_applied_sw_if_index), sw_if_index);
+ u32 index2 = vec_search((*hash_acl_applied_lc_index), lc_index);
if (index2 == ~0) {
- clib_warning("BUG: trying to unapply twice acl_index %d on (sw_if_index %d) is_input %d",
- acl_index, sw_if_index, is_input);
+ clib_warning("BUG: trying to unapply twice acl_index %d on lc_index %d, according to h-acl info",
+ acl_index, lc_index);
return;
}
- vec_del1((*hash_acl_applied_sw_if_index), index2);
+ vec_del1((*hash_acl_applied_lc_index), index2);
- applied_hash_ace_entry_t **applied_hash_aces = get_applied_hash_aces(am, is_input, sw_if_index);
+ applied_hash_ace_entry_t **applied_hash_aces = get_applied_hash_aces(am, lc_index);
for(i=0; i < vec_len((*applied_hash_aces)); i++) {
if (vec_elt_at_index(*applied_hash_aces,i)->acl_index == acl_index) {
@@ -559,7 +446,7 @@ hash_acl_unapply(acl_main_t *am, u32 sw_if_index, u8 is_input, int acl_index)
}
}
if (vec_len((*applied_hash_aces)) <= i) {
- DBG("Did not find applied ACL#%d at sw_if_index %d", acl_index, sw_if_index);
+ DBG("Did not find applied ACL#%d at lc_index %d", acl_index, lc_index);
/* we went all the way without finding any entries. Probably a list was empty. */
return;
}
@@ -571,14 +458,14 @@ hash_acl_unapply(acl_main_t *am, u32 sw_if_index, u8 is_input, int acl_index)
DBG("base_offset: %d, tail_offset: %d, tail_len: %d", base_offset, tail_offset, tail_len);
for(i=0; i < vec_len(ha->rules); i ++) {
- deactivate_applied_ace_hash_entry(am, sw_if_index, is_input,
+ deactivate_applied_ace_hash_entry(am, lc_index,
applied_hash_aces, base_offset + i);
}
for(i=0; i < tail_len; i ++) {
/* move the entry at tail offset to base offset */
/* that is, from (tail_offset+i) -> (base_offset+i) */
- DBG("UNAPPLY MOVE: sw_if_index %d is_input %d, applied index %d ->", sw_if_index, is_input, tail_offset+i, base_offset + i);
- move_applied_ace_hash_entry(am, sw_if_index, is_input, applied_hash_aces, tail_offset + i, base_offset + i);
+ DBG("UNAPPLY MOVE: lc_index %d, applied index %d -> %d", lc_index, tail_offset+i, base_offset + i);
+ move_applied_ace_hash_entry(am, lc_index, applied_hash_aces, tail_offset + i, base_offset + i);
}
/* trim the end of the vector */
_vec_len((*applied_hash_aces)) -= vec_len(ha->rules);
@@ -586,7 +473,7 @@ hash_acl_unapply(acl_main_t *am, u32 sw_if_index, u8 is_input, int acl_index)
applied_hash_entries_analyze(am, applied_hash_aces);
/* After deletion we might not need some of the mask-types anymore... */
- hash_acl_build_applied_lookup_bitmap(am, sw_if_index, is_input);
+ hash_acl_build_applied_lookup_bitmap(am, lc_index);
clib_mem_set_heap (oldheap);
}
@@ -600,24 +487,26 @@ hash_acl_unapply(acl_main_t *am, u32 sw_if_index, u8 is_input, int acl_index)
*/
void
-hash_acl_reapply(acl_main_t *am, u32 sw_if_index, u8 is_input, int acl_index)
+hash_acl_reapply(acl_main_t *am, u32 lc_index, int acl_index)
{
- u32 **applied_acls = is_input ? vec_elt_at_index(am->input_acl_vec_by_sw_if_index, sw_if_index)
- : vec_elt_at_index(am->output_acl_vec_by_sw_if_index, sw_if_index);
+ acl_lookup_context_t *acontext = pool_elt_at_index(am->acl_lookup_contexts, lc_index);
+ u32 **applied_acls = &acontext->acl_indices;
int i;
int start_index = vec_search((*applied_acls), acl_index);
+
+ DBG0("Start index for acl %d in lc_index %d is %d", acl_index, lc_index, start_index);
/*
* This function is called after we find out the sw_if_index where ACL is applied.
* If the by-sw_if_index vector does not have the ACL#, then it's a bug.
*/
ASSERT(start_index < vec_len(*applied_acls));
- /* unapply all the ACLs till the current one */
+ /* unapply all the ACLs at the tail side, up to the current one */
for(i = vec_len(*applied_acls) - 1; i > start_index; i--) {
- hash_acl_unapply(am, sw_if_index, is_input, *vec_elt_at_index(*applied_acls, i));
+ hash_acl_unapply(am, lc_index, *vec_elt_at_index(*applied_acls, i));
}
for(i = start_index; i < vec_len(*applied_acls); i++) {
- hash_acl_apply(am, sw_if_index, is_input, *vec_elt_at_index(*applied_acls, i));
+ hash_acl_apply(am, lc_index, *vec_elt_at_index(*applied_acls, i), i);
}
}
@@ -667,9 +556,8 @@ make_mask_and_match_from_rule(fa_5tuple_t *mask, acl_rule_t *r, hash_ace_info_t
memset(&hi->match, 0, sizeof(hi->match));
hi->action = r->is_permit;
- /* we will need to be matching based on sw_if_index, direction, and mask_type_index when applied */
- mask->pkt.sw_if_index = ~0;
- mask->pkt.is_input = 1;
+ /* we will need to be matching based on lc_index and mask_type_index when applied */
+ mask->pkt.lc_index = ~0;
/* we will assign the match of mask_type_index later when we find it*/
mask->pkt.mask_type_index_lsb = ~0;
@@ -766,6 +654,15 @@ release_mask_type_index(acl_main_t *am, u32 mask_type_index)
}
}
+int hash_acl_exists(acl_main_t *am, int acl_index)
+{
+ if (acl_index >= vec_len(am->hash_acl_infos))
+ return 0;
+
+ hash_acl_info_t *ha = vec_elt_at_index(am->hash_acl_infos, acl_index);
+ return ha->hash_acl_exists;
+}
+
void hash_acl_add(acl_main_t *am, int acl_index)
{
void *oldheap = hash_acl_set_heap(am);
@@ -775,6 +672,7 @@ void hash_acl_add(acl_main_t *am, int acl_index)
vec_validate(am->hash_acl_infos, acl_index);
hash_acl_info_t *ha = vec_elt_at_index(am->hash_acl_infos, acl_index);
memset(ha, 0, sizeof(*ha));
+ ha->hash_acl_exists = 1;
/* walk the newly added ACL entries and ensure that for each of them there
is a mask type, increment a reference count for that mask type */
@@ -808,16 +706,10 @@ void hash_acl_add(acl_main_t *am, int acl_index)
* if an ACL is applied somewhere, fill the corresponding lookup data structures.
* We need to take care if the ACL is not the last one in the vector of ACLs applied to the interface.
*/
- if (acl_index < vec_len(am->input_sw_if_index_vec_by_acl)) {
- u32 *sw_if_index;
- vec_foreach(sw_if_index, am->input_sw_if_index_vec_by_acl[acl_index]) {
- hash_acl_reapply(am, *sw_if_index, 1, acl_index);
- }
- }
- if (acl_index < vec_len(am->output_sw_if_index_vec_by_acl)) {
- u32 *sw_if_index;
- vec_foreach(sw_if_index, am->output_sw_if_index_vec_by_acl[acl_index]) {
- hash_acl_reapply(am, *sw_if_index, 0, acl_index);
+ if (acl_index < vec_len(am->lc_index_vec_by_acl)) {
+ u32 *lc_index;
+ vec_foreach(lc_index, am->lc_index_vec_by_acl[acl_index]) {
+ hash_acl_reapply(am, *lc_index, acl_index);
}
}
clib_mem_set_heap (oldheap);
@@ -841,18 +733,14 @@ void hash_acl_delete(acl_main_t *am, int acl_index)
* has to be handled.
*/
hash_acl_info_t *ha = vec_elt_at_index(am->hash_acl_infos, acl_index);
- u32 *interface_list_copy = 0;
+ u32 *lc_list_copy = 0;
{
- u32 *sw_if_index;
- interface_list_copy = vec_dup(ha->inbound_sw_if_index_list);
- vec_foreach(sw_if_index, interface_list_copy) {
- hash_acl_unapply(am, *sw_if_index, 1, acl_index);
- }
- vec_free(interface_list_copy);
- interface_list_copy = vec_dup(ha->outbound_sw_if_index_list);
- vec_foreach(sw_if_index, interface_list_copy) {
- hash_acl_unapply(am, *sw_if_index, 0, acl_index);
+ u32 *lc_index;
+ lc_list_copy = vec_dup(ha->lc_index_list);
+ vec_foreach(lc_index, lc_list_copy) {
+ hash_acl_unapply(am, *lc_index, acl_index);
}
+ vec_free(lc_list_copy);
}
/* walk the mask types for the ACL about-to-be-deleted, and decrease
@@ -862,32 +750,153 @@ void hash_acl_delete(acl_main_t *am, int acl_index)
release_mask_type_index(am, ha->rules[i].mask_type_index);
}
clib_bitmap_free(ha->mask_type_index_bitmap);
+ ha->hash_acl_exists = 0;
vec_free(ha->rules);
clib_mem_set_heap (oldheap);
}
-u8
-hash_multi_acl_match_5tuple (u32 sw_if_index, fa_5tuple_t * pkt_5tuple, int is_l2,
- int is_ip6, int is_input, u32 * acl_match_p,
- u32 * rule_match_p, u32 * trace_bitmap)
+
+void
+show_hash_acl_hash (vlib_main_t * vm, acl_main_t *am, u32 verbose)
+{
+ vlib_cli_output(vm, "\nACL lookup hash table:\n%U\n",
+ BV (format_bihash), &am->acl_lookup_hash, verbose);
+}
+
+void
+acl_plugin_show_tables_mask_type (void)
{
acl_main_t *am = &acl_main;
- applied_hash_ace_entry_t **applied_hash_aces = get_applied_hash_aces(am, is_input, sw_if_index);
- u32 match_index = multi_acl_match_get_applied_ace_index(am, pkt_5tuple);
- if (match_index < vec_len((*applied_hash_aces))) {
- applied_hash_ace_entry_t *pae = vec_elt_at_index((*applied_hash_aces), match_index);
- pae->hitcount++;
- *acl_match_p = pae->acl_index;
- *rule_match_p = pae->ace_index;
- return pae->action;
- }
- return 0;
+ vlib_main_t *vm = am->vlib_main;
+ ace_mask_type_entry_t *mte;
+
+ vlib_cli_output (vm, "Mask-type entries:");
+ /* *INDENT-OFF* */
+ pool_foreach(mte, am->ace_mask_type_pool,
+ ({
+ vlib_cli_output(vm, " %3d: %016llx %016llx %016llx %016llx %016llx %016llx refcount %d",
+ mte - am->ace_mask_type_pool,
+ mte->mask.kv.key[0], mte->mask.kv.key[1], mte->mask.kv.key[2],
+ mte->mask.kv.key[3], mte->mask.kv.key[4], mte->mask.kv.value, mte->refcount);
+ }));
+ /* *INDENT-ON* */
}
+void
+acl_plugin_show_tables_acl_hash_info (u32 acl_index)
+{
+ acl_main_t *am = &acl_main;
+ vlib_main_t *vm = am->vlib_main;
+ u32 i, j;
+ u64 *m;
+ vlib_cli_output (vm, "Mask-ready ACL representations\n");
+ for (i = 0; i < vec_len (am->hash_acl_infos); i++)
+ {
+ if ((acl_index != ~0) && (acl_index != i))
+ {
+ continue;
+ }
+ hash_acl_info_t *ha = &am->hash_acl_infos[i];
+ vlib_cli_output (vm, "acl-index %u bitmask-ready layout\n", i);
+ vlib_cli_output (vm, " applied lc_index list: %U\n",
+ format_vec32, ha->lc_index_list, "%d");
+ vlib_cli_output (vm, " mask type index bitmap: %U\n",
+ format_bitmap_hex, ha->mask_type_index_bitmap);
+ for (j = 0; j < vec_len (ha->rules); j++)
+ {
+ hash_ace_info_t *pa = &ha->rules[j];
+ m = (u64 *) & pa->match;
+ vlib_cli_output (vm,
+ " %4d: %016llx %016llx %016llx %016llx %016llx %016llx mask index %d acl %d rule %d action %d src/dst portrange not ^2: %d,%d\n",
+ j, m[0], m[1], m[2], m[3], m[4], m[5],
+ pa->mask_type_index, pa->acl_index, pa->ace_index,
+ pa->action, pa->src_portrange_not_powerof2,
+ pa->dst_portrange_not_powerof2);
+ }
+ }
+}
void
-show_hash_acl_hash (vlib_main_t * vm, acl_main_t *am, u32 verbose)
+acl_plugin_print_pae (vlib_main_t * vm, int j, applied_hash_ace_entry_t * pae)
{
- vlib_cli_output(vm, "\nACL lookup hash table:\n%U\n",
- BV (format_bihash), &am->acl_lookup_hash, verbose);
+ vlib_cli_output (vm,
+ " %4d: acl %d rule %d action %d bitmask-ready rule %d next %d prev %d tail %d hitcount %lld",
+ j, pae->acl_index, pae->ace_index, pae->action,
+ pae->hash_ace_info_index, pae->next_applied_entry_index,
+ pae->prev_applied_entry_index,
+ pae->tail_applied_entry_index, pae->hitcount);
}
+
+void
+acl_plugin_show_tables_applied_info (u32 sw_if_index)
+{
+ acl_main_t *am = &acl_main;
+ vlib_main_t *vm = am->vlib_main;
+ u32 swi; //, j;
+ vlib_cli_output (vm, "Applied lookup entries for interfaces");
+
+ for (swi = 0;
+ (swi < vec_len (am->input_lc_index_by_sw_if_index))
+ || (swi < vec_len (am->output_lc_index_by_sw_if_index)); swi++)
+ {
+ if ((sw_if_index != ~0) && (sw_if_index != swi))
+ {
+ continue;
+ }
+/*
+ vlib_cli_output (vm, "sw_if_index %d:", swi);
+ if (swi < vec_len (am->input_applied_hash_acl_info_by_sw_if_index))
+ {
+ applied_hash_acl_info_t *pal =
+ &am->input_applied_hash_acl_info_by_sw_if_index[swi];
+ vlib_cli_output (vm, " input lookup mask_type_index_bitmap: %U",
+ format_bitmap_hex, pal->mask_type_index_bitmap);
+ vlib_cli_output (vm, " input applied acls: %U", format_vec32,
+ pal->applied_acls, "%d");
+ }
+ if (swi < vec_len (am->input_hash_entry_vec_by_sw_if_index))
+ {
+ vlib_cli_output (vm, " input lookup applied entries:");
+ for (j = 0;
+ j < vec_len (am->input_hash_entry_vec_by_sw_if_index[swi]);
+ j++)
+ {
+ acl_plugin_print_pae (vm, j,
+ &am->input_hash_entry_vec_by_sw_if_index
+ [swi][j]);
+ }
+ }
+
+ if (swi < vec_len (am->output_applied_hash_acl_info_by_sw_if_index))
+ {
+ applied_hash_acl_info_t *pal =
+ &am->output_applied_hash_acl_info_by_sw_if_index[swi];
+ vlib_cli_output (vm, " output lookup mask_type_index_bitmap: %U",
+ format_bitmap_hex, pal->mask_type_index_bitmap);
+ vlib_cli_output (vm, " output applied acls: %U", format_vec32,
+ pal->applied_acls, "%d");
+ }
+ if (swi < vec_len (am->output_hash_entry_vec_by_sw_if_index))
+ {
+ vlib_cli_output (vm, " output lookup applied entries:");
+ for (j = 0;
+ j < vec_len (am->output_hash_entry_vec_by_sw_if_index[swi]);
+ j++)
+ {
+ acl_plugin_print_pae (vm, j,
+ &am->output_hash_entry_vec_by_sw_if_index
+ [swi][j]);
+ }
+ }
+*/
+ }
+}
+
+void
+acl_plugin_show_tables_bihash (u32 show_bihash_verbose)
+{
+ acl_main_t *am = &acl_main;
+ vlib_main_t *vm = am->vlib_main;
+ show_hash_acl_hash (vm, am, show_bihash_verbose);
+}
+