summaryrefslogtreecommitdiffstats
path: root/src/plugins/acl/public_inlines.h
diff options
context:
space:
mode:
authorAndrew Yourtchenko <ayourtch@gmail.com>2019-06-13 15:23:21 +0000
committerDamjan Marion <dmarion@me.com>2019-07-24 18:16:41 +0000
commitf995c7122ba0d024b17bc3232e8edd18d5e25088 (patch)
tree1bb44ddff0d009cf5e7fa62c8418b094edcaaa79 /src/plugins/acl/public_inlines.h
parent025cd9c867bef937724535033ccdb979292b7714 (diff)
acl: implement counters
implement per-acl-number counters in the stats segment. They are created during the ACL creation, the counters are incremented in the dataplane using the new inline function with the extra parameter being the packet size. Counting in the shared segment adds a noticeable overhead, so also add an API to turn the counters on. Type: feature Change-Id: I8af7b0c31a3d986b68089eb52452aed45df66c7b Signed-off-by: Andrew Yourtchenko <ayourtch@gmail.com>
Diffstat (limited to 'src/plugins/acl/public_inlines.h')
-rw-r--r--src/plugins/acl/public_inlines.h41
1 file changed, 41 insertions, 0 deletions
diff --git a/src/plugins/acl/public_inlines.h b/src/plugins/acl/public_inlines.h
index 03b64012a74..6b69bcef61e 100644
--- a/src/plugins/acl/public_inlines.h
+++ b/src/plugins/acl/public_inlines.h
@@ -682,5 +682,46 @@ acl_plugin_match_5tuple_inline (void *p_acl_main, u32 lc_index,
}
+/**
+ * Match a 5-tuple against the ACLs of a lookup context and, on a hit,
+ * bump the matched rule's combined counter (+1 packet, +packet_size bytes).
+ *
+ * Same contract as acl_plugin_match_5tuple_inline, with one extra
+ * parameter:
+ *   @param packet_size  byte count folded into the matched rule's counter.
+ *
+ * @return non-zero if a rule matched; on a match the action and the
+ *         position/ACL#/rule# are written through r_action, r_acl_pos_p,
+ *         r_acl_match_p and r_rule_match_p.
+ *
+ * NOTE(review): per the commit message, the counters are created at ACL
+ * creation time and counting is switched on via a separate API --
+ * presumably combined_acl_counters is always a valid vector here; confirm.
+ */
+always_inline int
+acl_plugin_match_5tuple_inline_and_count (void *p_acl_main, u32 lc_index,
+                                          fa_5tuple_opaque_t * pkt_5tuple,
+                                          int is_ip6, u8 * r_action,
+                                          u32 * r_acl_pos_p,
+                                          u32 * r_acl_match_p,
+                                          u32 * r_rule_match_p,
+                                          u32 * trace_bitmap,
+                                          u32 packet_size)
+{
+  acl_main_t *am = p_acl_main;
+  int ret = 0;
+  fa_5tuple_t * pkt_5tuple_internal = (fa_5tuple_t *)pkt_5tuple;
+  /* stash the lookup context into the tuple so the matchers can see it */
+  pkt_5tuple_internal->pkt.lc_index = lc_index;
+  if (PREDICT_TRUE(am->use_hash_acl_matching)) {
+    if (PREDICT_FALSE(pkt_5tuple_internal->pkt.is_nonfirst_fragment)) {
+      /*
+       * tuplemerge does not take fragments into account,
+       * and in general making fragments first class citizens has
+       * proved more overhead than it's worth - so just fall back to linear
+       * matching in that case.
+       */
+      ret = linear_multi_acl_match_5tuple(p_acl_main, lc_index, pkt_5tuple_internal, is_ip6, r_action,
+                                 r_acl_pos_p, r_acl_match_p, r_rule_match_p, trace_bitmap);
+    } else {
+      ret = hash_multi_acl_match_5tuple(p_acl_main, lc_index, pkt_5tuple_internal, is_ip6, r_action,
+                                 r_acl_pos_p, r_acl_match_p, r_rule_match_p, trace_bitmap);
+    }
+  } else {
+    ret = linear_multi_acl_match_5tuple(p_acl_main, lc_index, pkt_5tuple_internal, is_ip6, r_action,
+                                 r_acl_pos_p, r_acl_match_p, r_rule_match_p, trace_bitmap);
+  }
+  if (PREDICT_TRUE(ret)) {
+    u16 thread_index = os_get_thread_index ();
+    /* Combined counter indexed by matched ACL number, rule index within
+       that ACL: one packet, packet_size bytes, per worker thread. */
+    vlib_increment_combined_counter(am->combined_acl_counters + *r_acl_match_p, thread_index, *r_rule_match_p, 1, packet_size);
+  }
+  return ret;
+}
+
+
+
#endif