#include <stdint.h>
+#include <vlib/unix/plugin.h>
#include <plugins/acl/acl.h>
#include <plugins/acl/fa_node.h>
#include <plugins/acl/hash_lookup_private.h>
-
-/* check if a given ACL exists */
-
-#ifdef ACL_PLUGIN_EXTERNAL_EXPORTS
-
-/*
- * Define a pointer to the acl_main which will be filled during the initialization.
- */
-acl_main_t *p_acl_main = 0;
-
-/*
- * If the file is included more than once, the symbol collision will make the problem obvious.
- * If the include is done only once, it is just a lonely null var
- * sitting around.
- */
-void *ERROR_ACL_PLUGIN_EXPORTS_FILE_MUST_BE_INCLUDED_ONLY_IN_ONE_PLACE = 0;
-
-u8 (*acl_plugin_acl_exists) (u32 acl_index);
-#else
-u8 acl_plugin_acl_exists (u32 acl_index);
-#endif
-
-
-/*
- * If you are using ACL plugin, get this unique ID first,
- * so you can identify yourself when creating the lookup contexts.
- */
-
-#ifdef ACL_PLUGIN_EXTERNAL_EXPORTS
-u32 (*acl_plugin_register_user_module) (char *caller_module_string, char *val1_label, char *val2_label);
-#else
-u32 acl_plugin_register_user_module (char *caller_module_string, char *val1_label, char *val2_label);
-#endif
-
-/*
- * Allocate a new lookup context index.
- * Supply the id assigned to your module during registration,
- * and two values of your choice identifying instances
- * of use within your module. They are useful for debugging.
- */
-#ifdef ACL_PLUGIN_EXTERNAL_EXPORTS
-int (*acl_plugin_get_lookup_context_index) (u32 acl_user_id, u32 val1, u32 val2);
-#else
-int acl_plugin_get_lookup_context_index (u32 acl_user_id, u32 val1, u32 val2);
-#endif
-
-/*
- * Release the lookup context index and destroy
- * any asssociated data structures.
- */
-#ifdef ACL_PLUGIN_EXTERNAL_EXPORTS
-void (*acl_plugin_put_lookup_context_index) (u32 lc_index);
-#else
-void acl_plugin_put_lookup_context_index (u32 lc_index);
-#endif
-
-/*
- * Prepare the sequential vector of ACL#s to lookup within a given context.
- * Any existing list will be overwritten. acl_list is a vector.
- */
-#ifdef ACL_PLUGIN_EXTERNAL_EXPORTS
-int (*acl_plugin_set_acl_vec_for_context) (u32 lc_index, u32 *acl_list);
-#else
-int acl_plugin_set_acl_vec_for_context (u32 lc_index, u32 *acl_list);
-#endif
-
-/* Fill the 5-tuple from the packet */
-
-#ifdef ACL_PLUGIN_EXTERNAL_EXPORTS
-void (*acl_plugin_fill_5tuple) (u32 lc_index, vlib_buffer_t * b0, int is_ip6, int is_input,
- int is_l2_path, fa_5tuple_opaque_t * p5tuple_pkt);
-#else
-void acl_plugin_fill_5tuple (u32 lc_index, vlib_buffer_t * b0, int is_ip6, int is_input,
- int is_l2_path, fa_5tuple_opaque_t * p5tuple_pkt);
-#endif
-
-#ifdef ACL_PLUGIN_DEFINED_BELOW_IN_FILE
-static inline
-void acl_plugin_fill_5tuple_inline (u32 lc_index, vlib_buffer_t * b0, int is_ip6, int is_input,
- int is_l2_path, fa_5tuple_opaque_t * p5tuple_pkt) {
- /* FIXME: normally the inlined version of filling in the 5-tuple. But for now just call the non-inlined version */
- acl_plugin_fill_5tuple(lc_index, b0, is_ip6, is_input, is_l2_path, p5tuple_pkt);
-}
-#endif
-
-
-#ifdef ACL_PLUGIN_EXTERNAL_EXPORTS
-int (*acl_plugin_match_5tuple) (u32 lc_index,
- fa_5tuple_opaque_t * pkt_5tuple,
- int is_ip6, u8 * r_action,
- u32 * r_acl_pos_p,
- u32 * r_acl_match_p,
- u32 * r_rule_match_p,
- u32 * trace_bitmap);
-#else
-int acl_plugin_match_5tuple (u32 lc_index,
- fa_5tuple_opaque_t * pkt_5tuple,
- int is_ip6, u8 * r_action,
- u32 * r_acl_pos_p,
- u32 * r_acl_match_p,
- u32 * r_rule_match_p,
- u32 * trace_bitmap);
-#endif
-
-#ifdef ACL_PLUGIN_DEFINED_BELOW_IN_FILE
-static inline int
-acl_plugin_match_5tuple_inline (u32 lc_index,
- fa_5tuple_opaque_t * pkt_5tuple,
- int is_ip6, u8 * r_action,
- u32 * r_acl_pos_p,
- u32 * r_acl_match_p,
- u32 * r_rule_match_p,
- u32 * trace_bitmap) {
- return acl_plugin_match_5tuple(lc_index, pkt_5tuple, is_ip6, r_action, r_acl_pos_p, r_acl_match_p, r_rule_match_p, trace_bitmap);
-}
-#endif
-
-#ifdef ACL_PLUGIN_EXTERNAL_EXPORTS
+#include <plugins/acl/exported_types.h>
#define LOAD_SYMBOL_FROM_PLUGIN_TO(p, s, st) \
({ \
#define LOAD_SYMBOL(s) LOAD_SYMBOL_FROM_PLUGIN_TO("acl_plugin.so", s, s)
-static inline clib_error_t * acl_plugin_exports_init (void)
-{
- LOAD_SYMBOL_FROM_PLUGIN_TO("acl_plugin.so", acl_main, p_acl_main);
- LOAD_SYMBOL(acl_plugin_acl_exists);
- LOAD_SYMBOL(acl_plugin_register_user_module);
- LOAD_SYMBOL(acl_plugin_get_lookup_context_index);
- LOAD_SYMBOL(acl_plugin_put_lookup_context_index);
- LOAD_SYMBOL(acl_plugin_set_acl_vec_for_context);
- LOAD_SYMBOL(acl_plugin_fill_5tuple);
- LOAD_SYMBOL(acl_plugin_match_5tuple);
- return 0;
-}
-
-#endif
+/*
+ * Resolve and invoke the ACL plugin's method-table initializer.
+ * Loads the acl_plugin_methods_vtable_init symbol from acl_plugin.so and
+ * calls it to fill in the caller-supplied method table 'm'; returns the
+ * clib_error_t the initializer produces (0 on success).
+ * NOTE(review): behavior when the symbol cannot be found depends on
+ * LOAD_SYMBOL_FROM_PLUGIN_TO, whose body is not fully visible here — confirm
+ * it returns/propagates an error rather than leaving 'mvi' unset.
+ */
+static inline clib_error_t * acl_plugin_exports_init (acl_plugin_methods_t *m)
+{
+  acl_plugin_methods_vtable_init_fn_t mvi;
+  LOAD_SYMBOL_FROM_PLUGIN_TO("acl_plugin.so", acl_plugin_methods_vtable_init, mvi);
+  return (mvi(m));
+}
always_inline void *
get_ptr_to_offset (vlib_buffer_t * b0, int offset)
}
always_inline void
-acl_plugin_fill_5tuple_inline (u32 lc_index, vlib_buffer_t * b0, int is_ip6,
+acl_plugin_fill_5tuple_inline (void *p_acl_main, u32 lc_index, vlib_buffer_t * b0, int is_ip6,
int is_input, int is_l2_path, fa_5tuple_opaque_t * p5tuple_pkt)
{
acl_main_t *am = p_acl_main;
}
always_inline int
-acl_plugin_single_acl_match_5tuple (u32 acl_index, fa_5tuple_t * pkt_5tuple,
+acl_plugin_single_acl_match_5tuple (void *p_acl_main, u32 acl_index, fa_5tuple_t * pkt_5tuple,
int is_ip6, u8 * r_action, u32 * r_acl_match_p,
u32 * r_rule_match_p, u32 * trace_bitmap)
{
}
always_inline int
-linear_multi_acl_match_5tuple (u32 lc_index, fa_5tuple_t * pkt_5tuple,
+linear_multi_acl_match_5tuple (void *p_acl_main, u32 lc_index, fa_5tuple_t * pkt_5tuple,
int is_ip6, u8 *r_action, u32 *acl_pos_p, u32 * acl_match_p,
u32 * rule_match_p, u32 * trace_bitmap)
{
((r->dst_port_or_code_first <= match->l4.port[1]) && r->dst_port_or_code_last >= match->l4.port[1]) );
}
+/*
+ * Match a single ACL rule against an extracted packet 5-tuple.
+ * Returns 1 when every field the rule constrains matches, 0 otherwise.
+ * Check order: address family, dst then src prefix, and — only when the
+ * rule constrains a protocol (r->proto != 0) — L4 protocol, source and
+ * destination ports (the *_or_type/*_or_code field names indicate these
+ * double as ICMP type/code ranges), and TCP flags when the packet
+ * carries valid ones.
+ */
+always_inline int
+single_rule_match_5tuple (acl_rule_t * r, int is_ip6, fa_5tuple_t * pkt_5tuple)
+{
+  if (is_ip6 != r->is_ipv6)
+    {
+      return 0;
+    }
+
+  if (is_ip6)
+    {
+      if (!fa_acl_match_ip6_addr
+	  (&pkt_5tuple->ip6_addr[1], &r->dst.ip6, r->dst_prefixlen))
+	return 0;
+      if (!fa_acl_match_ip6_addr
+	  (&pkt_5tuple->ip6_addr[0], &r->src.ip6, r->src_prefixlen))
+	return 0;
+    }
+  else
+    {
+      if (!fa_acl_match_ip4_addr
+	  (&pkt_5tuple->ip4_addr[1], &r->dst.ip4, r->dst_prefixlen))
+	return 0;
+      if (!fa_acl_match_ip4_addr
+	  (&pkt_5tuple->ip4_addr[0], &r->src.ip4, r->src_prefixlen))
+	return 0;
+    }
+
+  /* r->proto == 0 means "any protocol": skip all L4 checks */
+  if (r->proto)
+    {
+      if (pkt_5tuple->l4.proto != r->proto)
+	return 0;
+
+      /* A sanity check just to ensure we are about to match the ports extracted from the packet */
+      if (PREDICT_FALSE (!pkt_5tuple->pkt.l4_valid))
+	return 0;
+
+
+      if (!fa_acl_match_port
+	  (pkt_5tuple->l4.port[0], r->src_port_or_type_first,
+	   r->src_port_or_type_last, pkt_5tuple->pkt.is_ip6))
+	return 0;
+
+
+      if (!fa_acl_match_port
+	  (pkt_5tuple->l4.port[1], r->dst_port_or_code_first,
+	   r->dst_port_or_code_last, pkt_5tuple->pkt.is_ip6))
+	return 0;
+
+      /* TCP flags are only compared when the 5-tuple extraction marked them valid */
+      if (pkt_5tuple->pkt.tcp_flags_valid
+	  && ((pkt_5tuple->pkt.tcp_flags & r->tcp_flags_mask) !=
+	      r->tcp_flags_value))
+	return 0;
+    }
+  /* everything matches! */
+  return 1;
+}
+
+/*
+ * Find the lowest applied-entry index matching the 5-tuple within the
+ * lookup context recorded in match->pkt.lc_index.  Walks the per-context
+ * mask partitions in order, masks the 5-tuple with each partition's mask,
+ * probes the bihash, and on a hit linearly verifies the entry's colliding
+ * rules with single_rule_match_5tuple().  Returns the winning index, or
+ * the (~0 - 1) sentinel when nothing matched.
+ */
always_inline u32
-multi_acl_match_get_applied_ace_index(acl_main_t *am, fa_5tuple_t *match)
+multi_acl_match_get_applied_ace_index (acl_main_t * am, int is_ip6, fa_5tuple_t * match)
{
clib_bihash_kv_48_8_t kv;
clib_bihash_kv_48_8_t result;
-  fa_5tuple_t *kv_key = (fa_5tuple_t *)kv.key;
-  hash_acl_lookup_value_t *result_val = (hash_acl_lookup_value_t *)&result.value;
-  u64 *pmatch = (u64 *)match;
+  fa_5tuple_t *kv_key = (fa_5tuple_t *) kv.key;
+  hash_acl_lookup_value_t *result_val =
+    (hash_acl_lookup_value_t *) & result.value;
+  u64 *pmatch = (u64 *) match;
u64 *pmask;
u64 *pkey;
-  int mask_type_index;
-  u32 curr_match_index = ~0;
+  int mask_type_index, order_index;
+  /* NOTE(review): sentinel is (~0 - 1) rather than ~0 — appears chosen so the
+   * first_rule_index early-break comparison below behaves; confirm intent. */
+  u32 curr_match_index = (~0 - 1);
+
+
u32 lc_index = match->pkt.lc_index;
-  applied_hash_ace_entry_t **applied_hash_aces = vec_elt_at_index(am->hash_entry_vec_by_lc_index, match->pkt.lc_index);
-  applied_hash_acl_info_t **applied_hash_acls = &am->applied_hash_acl_info_by_lc_index;
+  applied_hash_ace_entry_t **applied_hash_aces =
+    vec_elt_at_index (am->hash_entry_vec_by_lc_index, lc_index);
-  DBG("TRYING TO MATCH: %016llx %016llx %016llx %016llx %016llx %016llx",
-      pmatch[0], pmatch[1], pmatch[2], pmatch[3], pmatch[4], pmatch[5]);
+  hash_applied_mask_info_t **hash_applied_mask_info_vec =
+    vec_elt_at_index (am->hash_applied_mask_info_vec_by_lc_index, lc_index);
-  for(mask_type_index=0; mask_type_index < pool_len(am->ace_mask_type_pool); mask_type_index++) {
-    if (!clib_bitmap_get(vec_elt_at_index((*applied_hash_acls), lc_index)->mask_type_index_bitmap, mask_type_index)) {
-      /* This bit is not set. Avoid trying to match */
-      continue;
-    }
-    ace_mask_type_entry_t *mte = vec_elt_at_index(am->ace_mask_type_pool, mask_type_index);
-    pmatch = (u64 *)match;
-    pmask = (u64 *)&mte->mask;
-    pkey = (u64 *)kv.key;
-    /*
-     * unrolling the below loop results in a noticeable performance increase.
-    int i;
-    for(i=0; i<6; i++) {
-      kv.key[i] = pmatch[i] & pmask[i];
-    }
-    */
-
-    *pkey++ = *pmatch++ & *pmask++;
-    *pkey++ = *pmatch++ & *pmask++;
-    *pkey++ = *pmatch++ & *pmask++;
-    *pkey++ = *pmatch++ & *pmask++;
-    *pkey++ = *pmatch++ & *pmask++;
-    *pkey++ = *pmatch++ & *pmask++;
-
-    kv_key->pkt.mask_type_index_lsb = mask_type_index;
-    DBG("  KEY %3d: %016llx %016llx %016llx %016llx %016llx %016llx", mask_type_index,
-        kv.key[0], kv.key[1], kv.key[2], kv.key[3], kv.key[4], kv.key[5]);
-    int res = clib_bihash_search_48_8 (&am->acl_lookup_hash, &kv, &result);
-    if (res == 0) {
-      DBG("ACL-MATCH! result_val: %016llx", result_val->as_u64);
-      if (result_val->applied_entry_index < curr_match_index) {
-        if (PREDICT_FALSE(result_val->need_portrange_check)) {
-          /*
-           * This is going to be slow, since we can have multiple superset
-           * entries for narrow-ish portranges, e.g.:
-           * 0..42 100..400, 230..60000,
-           * so we need to walk linearly and check if they match.
-           */
-
-          u32 curr_index = result_val->applied_entry_index;
-          while ((curr_index != ~0) && !match_portranges(am, match, curr_index)) {
-            /* while no match and there are more entries, walk... */
-            applied_hash_ace_entry_t *pae = vec_elt_at_index((*applied_hash_aces),curr_index);
-            DBG("entry %d did not portmatch, advancing to %d", curr_index, pae->next_applied_entry_index);
-            curr_index = pae->next_applied_entry_index;
-          }
-          if (curr_index < curr_match_index) {
-            DBG("The index %d is the new candidate in portrange matches.", curr_index);
-            curr_match_index = curr_index;
-          } else {
-            DBG("Curr portmatch index %d is too big vs. current matched one %d", curr_index, curr_match_index);
-          }
-        } else {
-          /* The usual path is here. Found an entry in front of the current candiate - so it's a new one */
-          DBG("This match is the new candidate");
-          curr_match_index = result_val->applied_entry_index;
-          if (!result_val->shadowed) {
-          /* new result is known to not be shadowed, so no point to look up further */
-            break;
-          }
-        }
-      }
-    }
+  hash_applied_mask_info_t *minfo;
+
+  DBG ("TRYING TO MATCH: %016llx %016llx %016llx %016llx %016llx %016llx",
+       pmatch[0], pmatch[1], pmatch[2], pmatch[3], pmatch[4], pmatch[5]);
+
+  for (order_index = 0; order_index < vec_len ((*hash_applied_mask_info_vec));
+       order_index++)
+    {
+      minfo = vec_elt_at_index ((*hash_applied_mask_info_vec), order_index);
+      if (minfo->first_rule_index > curr_match_index)
+	{
+	  /* Rule indices in this and all following partitions are (by
+	   * construction) greater than the current candidate, so no further
+	   * partition can improve on it — stop searching. */
+	  break;
+	}
+
+      mask_type_index = minfo->mask_type_index;
+      ace_mask_type_entry_t *mte =
+	vec_elt_at_index (am->ace_mask_type_pool, mask_type_index);
+      pmatch = (u64 *) match;
+      pmask = (u64 *) & mte->mask;
+      pkey = (u64 *) kv.key;
+      /*
+       * unrolling the below loop results in a noticeable performance increase.
+       int i;
+       for(i=0; i<6; i++) {
+       kv.key[i] = pmatch[i] & pmask[i];
+       }
+       */
+
+      *pkey++ = *pmatch++ & *pmask++;
+      *pkey++ = *pmatch++ & *pmask++;
+      *pkey++ = *pmatch++ & *pmask++;
+      *pkey++ = *pmatch++ & *pmask++;
+      *pkey++ = *pmatch++ & *pmask++;
+      *pkey++ = *pmatch++ & *pmask++;
+
+      kv_key->pkt.mask_type_index_lsb = mask_type_index;
+      int res =
+	clib_bihash_search_inline_2_48_8 (&am->acl_lookup_hash, &kv, &result);
+
+      if (res == 0)
+	{
+	  /* There is a hit in the hash, so check the collision vector */
+	  u32 curr_index = result_val->applied_entry_index;
+	  applied_hash_ace_entry_t *pae =
+	    vec_elt_at_index ((*applied_hash_aces), curr_index);
+	  collision_match_rule_t *crs = pae->colliding_rules;
+	  int i;
+	  for (i = 0; i < vec_len (crs); i++)
+	    {
+	      /* only entries that would lower the candidate index are worth verifying */
+	      if (crs[i].applied_entry_index >= curr_match_index)
+		{
+		  continue;
+		}
+	      if (single_rule_match_5tuple (&crs[i].rule, is_ip6, match))
+		{
+		  curr_match_index = crs[i].applied_entry_index;
+		}
+	    }
+	}
+    }
-  }
-  DBG("MATCH-RESULT: %d", curr_match_index);
+  DBG ("MATCH-RESULT: %d", curr_match_index);
return curr_match_index;
}
always_inline int
-hash_multi_acl_match_5tuple (u32 lc_index, fa_5tuple_t * pkt_5tuple,
+hash_multi_acl_match_5tuple (void *p_acl_main, u32 lc_index, fa_5tuple_t * pkt_5tuple,
int is_ip6, u8 *action, u32 *acl_pos_p, u32 * acl_match_p,
u32 * rule_match_p, u32 * trace_bitmap)
{
acl_main_t *am = p_acl_main;
applied_hash_ace_entry_t **applied_hash_aces = vec_elt_at_index(am->hash_entry_vec_by_lc_index, lc_index);
- u32 match_index = multi_acl_match_get_applied_ace_index(am, pkt_5tuple);
+ u32 match_index = multi_acl_match_get_applied_ace_index(am, is_ip6, pkt_5tuple);
if (match_index < vec_len((*applied_hash_aces))) {
applied_hash_ace_entry_t *pae = vec_elt_at_index((*applied_hash_aces), match_index);
pae->hitcount++;
always_inline int
-acl_plugin_match_5tuple_inline (u32 lc_index,
+acl_plugin_match_5tuple_inline (void *p_acl_main, u32 lc_index,
fa_5tuple_opaque_t * pkt_5tuple,
int is_ip6, u8 * r_action,
u32 * r_acl_pos_p,
acl_main_t *am = p_acl_main;
fa_5tuple_t * pkt_5tuple_internal = (fa_5tuple_t *)pkt_5tuple;
pkt_5tuple_internal->pkt.lc_index = lc_index;
- if (am->use_hash_acl_matching) {
- return hash_multi_acl_match_5tuple(lc_index, pkt_5tuple_internal, is_ip6, r_action,
+ if (PREDICT_TRUE(am->use_hash_acl_matching)) {
+ if (PREDICT_FALSE(pkt_5tuple_internal->pkt.is_nonfirst_fragment)) {
+ /*
+     * tuplemerge does not take fragments into account,
+     * and in general making fragments first-class citizens has
+     * proved to be more overhead than it is worth - so just fall back
+     * to linear matching for them.
+ */
+ return linear_multi_acl_match_5tuple(p_acl_main, lc_index, pkt_5tuple_internal, is_ip6, r_action,
+ r_acl_pos_p, r_acl_match_p, r_rule_match_p, trace_bitmap);
+ } else {
+ return hash_multi_acl_match_5tuple(p_acl_main, lc_index, pkt_5tuple_internal, is_ip6, r_action,
r_acl_pos_p, r_acl_match_p, r_rule_match_p, trace_bitmap);
+ }
} else {
- return linear_multi_acl_match_5tuple(lc_index, pkt_5tuple_internal, is_ip6, r_action,
+ return linear_multi_acl_match_5tuple(p_acl_main, lc_index, pkt_5tuple_internal, is_ip6, r_action,
r_acl_pos_p, r_acl_match_p, r_rule_match_p, trace_bitmap);
}
}