author | Benoît Ganne <bganne@cisco.com> | 2022-06-08 10:49:17 +0200
---|---|---
committer | Damjan Marion <dmarion@0xa5.net> | 2022-06-29 21:23:38 +0000
commit | b03eec969f3db186fc354c3e885e51c0b24803f0 (patch) |
tree | 54871a6dc00bad0644f75755d616f10ad67de6c2 /src |
parent | b0057282d64a4f9ac5966fceb427057b0665b772 (diff) |
classify: use 32 bits hash
The classify hash used to be stored as a u64 in the buffer metadata; use 32 bits
instead:
- on almost all of our supported architectures (x86 and arm64) we use crc32c
intrinsics to compute the final hash, so we really only get a 32-bit hash
- the hash itself is used to compute a 32-bit bucket index by masking the
upper bits, so the upper 32 bits are always discarded anyway (see the sketch
below)
- this makes it possible to enlarge the l2 classify buffer metadata padding so
that it no longer overlaps with the ip fib_index metadata. This overlap is an
issue when using the 'set metadata' action in the ip ACL node, which updates
both fields
Type: fix
Change-Id: I5d35bdae97b96c3cae534e859b63950fb500ff50
Signed-off-by: Benoît Ganne <bganne@cisco.com>
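
To make the first two points concrete, here is a minimal sketch (not part of the patch), assuming clib_crc32c() from vppinfra as the final hash step and a power-of-two bucket count; example_bucket_index() is a hypothetical helper used only for illustration.

```c
#include <vppinfra/clib.h>
#include <vppinfra/crc32.h> /* clib_crc32c() - crc32c intrinsics on x86/arm64 */

/* Hypothetical helper, not from the patch: the final crc32c step yields only
 * 32 significant bits, and the bucket index keeps just the low-order bits of
 * that value, so the upper half of a u64 hash is never used. */
static inline u32
example_bucket_index (u8 *key, int len, u32 nbuckets /* power of two */)
{
  u32 hash = clib_crc32c (key, len); /* 32-bit result */
  return hash & (nbuckets - 1);      /* masking discards the upper bits */
}
```

Since only 32 bits of the hash are ever meaningful, storing it as u32 in the buffer metadata frees 4 bytes, which is what allows the larger l2_classify pad in the src/vnet/buffer.h hunk below.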
Diffstat (limited to 'src')
-rw-r--r-- | src/vnet/buffer.h | 4
-rw-r--r-- | src/vnet/classify/flow_classify_node.c | 4
-rw-r--r-- | src/vnet/classify/ip_classify.c | 4
-rw-r--r-- | src/vnet/classify/vnet_classify.c | 12
-rw-r--r-- | src/vnet/classify/vnet_classify.h | 21
-rw-r--r-- | src/vnet/interface_format.c | 6
-rw-r--r-- | src/vnet/ip/ip_in_out_acl.c | 32
-rw-r--r-- | src/vnet/l2/l2_in_out_acl.c | 4
-rw-r--r-- | src/vnet/l2/l2_input_classify.c | 9
-rw-r--r-- | src/vnet/l2/l2_output_classify.c | 9
-rw-r--r-- | src/vnet/policer/node_funcs.c | 4
11 files changed, 54 insertions, 55 deletions
```diff
diff --git a/src/vnet/buffer.h b/src/vnet/buffer.h
index 2f2524590e9..144f62ac17a 100644
--- a/src/vnet/buffer.h
+++ b/src/vnet/buffer.h
@@ -320,13 +320,13 @@ typedef struct
     /* L2 classify */
     struct
     {
-      struct opaque_l2 pad;
+      u32 pad[4]; /* do not overlay w/ ip.fib_index nor l2 */
       union
       {
        u32 table_index;
        u32 opaque_index;
       };
-      u64 hash;
+      u32 hash;
     } l2_classify;

     /* vnet policer */
diff --git a/src/vnet/classify/flow_classify_node.c b/src/vnet/classify/flow_classify_node.c
index 4989bf0a012..c0a29992fb4 100644
--- a/src/vnet/classify/flow_classify_node.c
+++ b/src/vnet/classify/flow_classify_node.c
@@ -184,7 +184,7 @@ flow_classify_inline (vlib_main_t * vm,
       u32 table_index0;
       vnet_classify_table_t *t0;
       vnet_classify_entry_t *e0;
-      u64 hash0;
+      u32 hash0;
       u8 *h0;

       /* Stride 3 seems to work best */
@@ -193,7 +193,7 @@ flow_classify_inline (vlib_main_t * vm,
          vlib_buffer_t *p1 = vlib_get_buffer (vm, from[3]);
          vnet_classify_table_t *tp1;
          u32 table_index1;
-         u64 phash1;
+         u32 phash1;

          table_index1 = vnet_buffer (p1)->l2_classify.table_index;
diff --git a/src/vnet/classify/ip_classify.c b/src/vnet/classify/ip_classify.c
index a5c044521bf..9454ae91937 100644
--- a/src/vnet/classify/ip_classify.c
+++ b/src/vnet/classify/ip_classify.c
@@ -190,7 +190,7 @@ ip_classify_inline (vlib_main_t * vm,
       u32 table_index0;
       vnet_classify_table_t *t0;
       vnet_classify_entry_t *e0;
-      u64 hash0;
+      u32 hash0;
       u8 *h0;

       /* Stride 3 seems to work best */
@@ -199,7 +199,7 @@ ip_classify_inline (vlib_main_t * vm,
          vlib_buffer_t *p1 = vlib_get_buffer (vm, from[3]);
          vnet_classify_table_t *tp1;
          u32 table_index1;
-         u64 phash1;
+         u32 phash1;

          table_index1 = vnet_buffer (p1)->l2_classify.table_index;
diff --git a/src/vnet/classify/vnet_classify.c b/src/vnet/classify/vnet_classify.c
index 479d3024631..8281be5bd15 100644
--- a/src/vnet/classify/vnet_classify.c
+++ b/src/vnet/classify/vnet_classify.c
@@ -424,7 +424,7 @@ vnet_classify_add_del (vnet_classify_table_t *t, vnet_classify_entry_t *add_v,
   u32 value_index;
   int rv = 0;
   int i;
-  u64 hash, new_hash;
+  u32 hash, new_hash;
   u32 limit;
   u32 old_log2_pages, new_log2_pages;
   u32 thread_index = vlib_get_thread_index ();
@@ -647,15 +647,15 @@ typedef CLIB_PACKED(struct {
 }) classify_data_or_mask_t;
 /* *INDENT-ON* */

-u64
-vnet_classify_hash_packet (vnet_classify_table_t * t, u8 * h)
+u32
+vnet_classify_hash_packet (const vnet_classify_table_t *t, u8 *h)
 {
   return vnet_classify_hash_packet_inline (t, h);
 }

 vnet_classify_entry_t *
-vnet_classify_find_entry (vnet_classify_table_t * t,
-                          u8 * h, u64 hash, f64 now)
+vnet_classify_find_entry (const vnet_classify_table_t *t, u8 *h, u32 hash,
+                          f64 now)
 {
   return vnet_classify_find_entry_inline (t, h, hash, now);
 }
@@ -3235,7 +3235,7 @@ test_classify_churn (test_classify_main_t * tm)
   for (i = 0; i < tm->sessions; i++)
     {
       u8 *key_minus_skip;
-      u64 hash;
+      u32 hash;
       vnet_classify_entry_t *e;

       ep = tm->entries + i;
diff --git a/src/vnet/classify/vnet_classify.h b/src/vnet/classify/vnet_classify.h
index c61ec8ff055..143833dfb20 100644
--- a/src/vnet/classify/vnet_classify.h
+++ b/src/vnet/classify/vnet_classify.h
@@ -243,7 +243,7 @@ u8 *format_classify_entry (u8 *s, va_list *args);
 u8 *format_classify_table (u8 * s, va_list * args);
 u8 *format_vnet_classify_table (u8 *s, va_list *args);

-u64 vnet_classify_hash_packet (vnet_classify_table_t * t, u8 * h);
+u32 vnet_classify_hash_packet (const vnet_classify_table_t *t, u8 *h);

 static_always_inline vnet_classify_table_t *
 vnet_classify_table_get (u32 table_index)
@@ -253,8 +253,8 @@ vnet_classify_table_get (u32 table_index)
   return (pool_elt_at_index (vcm->tables, table_index));
 }

-static inline u64
-vnet_classify_hash_packet_inline (vnet_classify_table_t *t, const u8 *h)
+static inline u32
+vnet_classify_hash_packet_inline (const vnet_classify_table_t *t, const u8 *h)
 {
   u64 xor_sum;
   ASSERT (t);
@@ -361,7 +361,7 @@ vnet_classify_prefetch_bucket (vnet_classify_table_t * t, u64 hash)
 }

 static inline vnet_classify_entry_t *
-vnet_classify_get_entry (vnet_classify_table_t * t, uword offset)
+vnet_classify_get_entry (const vnet_classify_table_t *t, uword offset)
 {
   u8 *hp = clib_mem_get_heap_base (t->mheap);
   u8 *vp = hp + offset;
@@ -383,8 +383,8 @@ vnet_classify_get_offset (vnet_classify_table_t * t,
 }

 static inline vnet_classify_entry_t *
-vnet_classify_entry_at_index (vnet_classify_table_t * t,
-                              vnet_classify_entry_t * e, u32 index)
+vnet_classify_entry_at_index (const vnet_classify_table_t *t,
+                              vnet_classify_entry_t *e, u32 index)
 {
   u8 *eu8;

@@ -421,8 +421,9 @@ vnet_classify_prefetch_entry (vnet_classify_table_t * t, u64 hash)
   clib_prefetch_load (e);
 }

-vnet_classify_entry_t *vnet_classify_find_entry (vnet_classify_table_t * t,
-                                                 u8 * h, u64 hash, f64 now);
+vnet_classify_entry_t *
+vnet_classify_find_entry (const vnet_classify_table_t *t, u8 *h, u32 hash,
+                          f64 now);

 static_always_inline int
 vnet_classify_entry_is_equal (vnet_classify_entry_t *v, const u8 *d, u8 *m,
@@ -529,8 +530,8 @@ vnet_classify_entry_is_equal (vnet_classify_entry_t *v, const u8 *d, u8 *m,
 }

 static inline vnet_classify_entry_t *
-vnet_classify_find_entry_inline (vnet_classify_table_t *t, const u8 *h,
-                                 u64 hash, f64 now)
+vnet_classify_find_entry_inline (const vnet_classify_table_t *t, const u8 *h,
+                                 u32 hash, f64 now)
 {
   vnet_classify_entry_t *v;
   vnet_classify_bucket_t *b;
diff --git a/src/vnet/interface_format.c b/src/vnet/interface_format.c
index 304fbb8a068..0c051dd4757 100644
--- a/src/vnet/interface_format.c
+++ b/src/vnet/interface_format.c
@@ -602,9 +602,9 @@ format_vnet_buffer_opaque (u8 * s, va_list * args)
   s = format (s,
              "l2_classify.table_index: %d, l2_classify.opaque_index: %d, "
-             "l2_classify.hash: 0x%llx",
-             o->l2_classify.table_index,
-             o->l2_classify.opaque_index, o->l2_classify.hash);
+             "l2_classify.hash: 0x%lx",
+             o->l2_classify.table_index, o->l2_classify.opaque_index,
+             o->l2_classify.hash);

   vec_add1 (s, '\n');
   s = format (s, "policer.index: %d", o->policer.index);
diff --git a/src/vnet/ip/ip_in_out_acl.c b/src/vnet/ip/ip_in_out_acl.c
index 9fc07176fa8..d8d6d768e93 100644
--- a/src/vnet/ip/ip_in_out_acl.c
+++ b/src/vnet/ip/ip_in_out_acl.c
@@ -130,7 +130,7 @@ ip_in_out_acl_inline_trace (
   u32 sw_if_index[4];
   u32 table_index[4];
   vnet_classify_table_t *t[4] = { 0, 0 };
-  u64 hash[4];
+  u32 hash[4];

   /* calculate hashes for b[0] & b[1] */
   if (n_left >= 2)
@@ -162,16 +162,16 @@ ip_in_out_acl_inline_trace (
       if (is_output)
        {
          /* Save the rewrite length, since we are using the l2_classify struct */
-         vnet_buffer (b[0])->l2_classify.pad.l2_len =
+         vnet_buffer (b[0])->l2.l2_len =
            vnet_buffer (b[0])->ip.save_rewrite_length;
          /* advance the match pointer so the matching happens on IP header */
-         h[2] += vnet_buffer (b[0])->l2_classify.pad.l2_len;
+         h[2] += vnet_buffer (b[0])->l2.l2_len;

          /* Save the rewrite length, since we are using the l2_classify struct */
-         vnet_buffer (b[1])->l2_classify.pad.l2_len =
+         vnet_buffer (b[1])->l2.l2_len =
            vnet_buffer (b[1])->ip.save_rewrite_length;
          /* advance the match pointer so the matching happens on IP header */
-         h[3] += vnet_buffer (b[1])->l2_classify.pad.l2_len;
+         h[3] += vnet_buffer (b[1])->l2.l2_len;
        }

       hash[2] = vnet_classify_hash_packet_inline (t[2], (u8 *) h[2]);
@@ -252,16 +252,16 @@ ip_in_out_acl_inline_trace (
       if (is_output)
        {
          /* Save the rewrite length, since we are using the l2_classify struct */
-         vnet_buffer (b[2])->l2_classify.pad.l2_len =
+         vnet_buffer (b[2])->l2.l2_len =
            vnet_buffer (b[2])->ip.save_rewrite_length;
          /* advance the match pointer so the matching happens on IP header */
-         h[2] += vnet_buffer (b[2])->l2_classify.pad.l2_len;
+         h[2] += vnet_buffer (b[2])->l2.l2_len;

          /* Save the rewrite length, since we are using the l2_classify struct */
-         vnet_buffer (b[3])->l2_classify.pad.l2_len =
+         vnet_buffer (b[3])->l2.l2_len =
            vnet_buffer (b[3])->ip.save_rewrite_length;
          /* advance the match pointer so the matching happens on IP header */
-         h[3] += vnet_buffer (b[3])->l2_classify.pad.l2_len;
+         h[3] += vnet_buffer (b[3])->l2.l2_len;
        }

       hash[2] = vnet_classify_hash_packet_inline (t[2], (u8 *) h[2]);
@@ -351,7 +351,7 @@ ip_in_out_acl_inline_trace (

          /* advance the match pointer so the matching happens on IP header */
          if (is_output)
-           h[0] += vnet_buffer (b[0])->l2_classify.pad.l2_len;
+           h[0] += vnet_buffer (b[0])->l2.l2_len;

          hash[0] = vnet_classify_hash_packet_inline (t[0], (u8 *) h[0]);
@@ -458,7 +458,7 @@ ip_in_out_acl_inline_trace (

          /* advance the match pointer so the matching happens on IP header */
          if (is_output)
-           h[1] += vnet_buffer (b[1])->l2_classify.pad.l2_len;
+           h[1] += vnet_buffer (b[1])->l2.l2_len;

          hash[1] = vnet_classify_hash_packet_inline (t[1], (u8 *) h[1]);
@@ -557,7 +557,7 @@ ip_in_out_acl_inline_trace (
       vnet_classify_table_t *t0 = 0;
       vnet_classify_entry_t *e0 = 0;
       u32 next0 = ACL_NEXT_INDEX_DENY;
-      u64 hash0;
+      u32 hash0;

       sw_if_index0 = ~0 == way ? 0 : vnet_buffer (b[0])->sw_if_index[way];
       table_index0 = table_index_by_sw_if_index[sw_if_index0];
@@ -573,10 +573,10 @@ ip_in_out_acl_inline_trace (
          if (is_output)
            {
              /* Save the rewrite length, since we are using the l2_classify struct */
-             vnet_buffer (b[0])->l2_classify.pad.l2_len =
+             vnet_buffer (b[0])->l2.l2_len =
                vnet_buffer (b[0])->ip.save_rewrite_length;
              /* advance the match pointer so the matching happens on IP header */
-             h0 += vnet_buffer (b[0])->l2_classify.pad.l2_len;
+             h0 += vnet_buffer (b[0])->l2.l2_len;
            }

          vnet_buffer (b[0])->l2_classify.hash =
@@ -602,7 +602,7 @@ ip_in_out_acl_inline_trace (

          /* advance the match pointer so the matching happens on IP header */
          if (is_output)
-           h0 += vnet_buffer (b[0])->l2_classify.pad.l2_len;
+           h0 += vnet_buffer (b[0])->l2.l2_len;

          e0 = vnet_classify_find_entry_inline (t0, (u8 *) h0, hash0, now);
          if (e0)
@@ -660,7 +660,7 @@ ip_in_out_acl_inline_trace (

              /* advance the match pointer so the matching happens on IP header */
              if (is_output)
-               h0 += vnet_buffer (b[0])->l2_classify.pad.l2_len;
+               h0 += vnet_buffer (b[0])->l2.l2_len;

              hash0 = vnet_classify_hash_packet_inline (t0, (u8 *) h0);
              e0 = vnet_classify_find_entry_inline
diff --git a/src/vnet/l2/l2_in_out_acl.c b/src/vnet/l2/l2_in_out_acl.c
index f8293c1feee..7307a6802a2 100644
--- a/src/vnet/l2/l2_in_out_acl.c
+++ b/src/vnet/l2/l2_in_out_acl.c
@@ -278,7 +278,7 @@ l2_in_out_acl_node_fn (vlib_main_t * vm,
       u32 table_index0;
       vnet_classify_table_t *t0;
       vnet_classify_entry_t *e0;
-      u64 hash0;
+      u32 hash0;
       u8 *h0;
       u8 error0;

@@ -288,7 +288,7 @@ l2_in_out_acl_node_fn (vlib_main_t * vm,
          vlib_buffer_t *p1 = vlib_get_buffer (vm, from[3]);
          vnet_classify_table_t *tp1;
          u32 table_index1;
-         u64 phash1;
+         u32 phash1;

          table_index1 = vnet_buffer (p1)->l2_classify.table_index;
diff --git a/src/vnet/l2/l2_input_classify.c b/src/vnet/l2/l2_input_classify.c
index 53d46399daf..d33a0810d28 100644
--- a/src/vnet/l2/l2_input_classify.c
+++ b/src/vnet/l2/l2_input_classify.c
@@ -179,8 +179,7 @@ VLIB_NODE_FN (l2_input_classify_node) (vlib_main_t * vm,
          int type_index0, type_index1;
          vnet_classify_table_t *t0, *t1;
          u32 table_index0, table_index1;
-         u64 hash0, hash1;
-
+         u32 hash0, hash1;

          /* prefetch next iteration */
          {
@@ -265,7 +264,7 @@ VLIB_NODE_FN (l2_input_classify_node) (vlib_main_t * vm,
          u32 type_index0;
          vnet_classify_table_t *t0;
          u32 table_index0;
-         u64 hash0;
+         u32 hash0;

          bi0 = from[0];
          b0 = vlib_get_buffer (vm, bi0);
@@ -316,14 +315,14 @@ VLIB_NODE_FN (l2_input_classify_node) (vlib_main_t * vm,
      u32 next0 = ~0;           /* next l2 input feature, please... */
      ethernet_header_t *h0;
      u32 table_index0;
-      u64 hash0;
+      u32 hash0;
      vnet_classify_table_t *t0;
      vnet_classify_entry_t *e0;

      if (PREDICT_TRUE (n_left_from > 2))
        {
          vlib_buffer_t *p2 = vlib_get_buffer (vm, from[2]);
-         u64 phash2;
+         u32 phash2;
          u32 table_index2;
          vnet_classify_table_t *tp2;
diff --git a/src/vnet/l2/l2_output_classify.c b/src/vnet/l2/l2_output_classify.c
index 96d0b14753a..97beb37f351 100644
--- a/src/vnet/l2/l2_output_classify.c
+++ b/src/vnet/l2/l2_output_classify.c
@@ -172,8 +172,7 @@ VLIB_NODE_FN (l2_output_classify_node) (vlib_main_t * vm,
          int type_index0, type_index1;
          vnet_classify_table_t *t0, *t1;
          u32 table_index0, table_index1;
-         u64 hash0, hash1;
-
+         u32 hash0, hash1;

          /* prefetch next iteration */
          {
@@ -257,7 +256,7 @@ VLIB_NODE_FN (l2_output_classify_node) (vlib_main_t * vm,
          u32 type_index0;
          vnet_classify_table_t *t0;
          u32 table_index0;
-         u64 hash0;
+         u32 hash0;

          bi0 = from[0];
          b0 = vlib_get_buffer (vm, bi0);
@@ -308,14 +307,14 @@ VLIB_NODE_FN (l2_output_classify_node) (vlib_main_t * vm,
      u32 next0 = ~0;
      ethernet_header_t *h0;
      u32 table_index0;
-      u64 hash0;
+      u32 hash0;
      vnet_classify_table_t *t0;
      vnet_classify_entry_t *e0;

      if (PREDICT_TRUE (n_left_from > 2))
        {
          vlib_buffer_t *p2 = vlib_get_buffer (vm, from[2]);
-         u64 phash2;
+         u32 phash2;
          u32 table_index2;
          vnet_classify_table_t *tp2;
diff --git a/src/vnet/policer/node_funcs.c b/src/vnet/policer/node_funcs.c
index 5fa0c8563be..efa2f830f8c 100644
--- a/src/vnet/policer/node_funcs.c
+++ b/src/vnet/policer/node_funcs.c
@@ -527,7 +527,7 @@ policer_classify_inline (vlib_main_t * vm,
       u32 table_index0;
       vnet_classify_table_t *t0;
       vnet_classify_entry_t *e0;
-      u64 hash0;
+      u32 hash0;
       u8 *h0;
       u8 act0;

@@ -537,7 +537,7 @@ policer_classify_inline (vlib_main_t * vm,
          vlib_buffer_t *p1 = vlib_get_buffer (vm, from[3]);
          vnet_classify_table_t *tp1;
          u32 table_index1;
-         u64 phash1;
+         u32 phash1;

          table_index1 = vnet_buffer (p1)->l2_classify.table_index;
```
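
For reference, a minimal caller sketch (not from the patch) showing how the changed classifier API fits together after this change; classify_lookup_sketch() is a hypothetical wrapper that only strings together the inline helpers whose new signatures appear in the vnet_classify.h hunk above (const table pointer, u32 hash).

```c
#include <vnet/classify/vnet_classify.h>

/* Hypothetical wrapper, for illustration only: hash the packet against the
 * table's mask, then look up the session entry with the 32-bit hash. */
static_always_inline vnet_classify_entry_t *
classify_lookup_sketch (u32 table_index, u8 *h, f64 now)
{
  const vnet_classify_table_t *t = vnet_classify_table_get (table_index);
  u32 hash = vnet_classify_hash_packet_inline (t, h); /* was u64 */
  return vnet_classify_find_entry_inline (t, h, hash, now);
}
```

The per-node hunks above (ip_in_out_acl, l2_in_out_acl, the l2 input/output classify nodes, policer) follow this same hash-then-lookup pattern, with the hash additionally stashed in vnet_buffer (b)->l2_classify.hash, which is why that field shrinks to u32 as well.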