2 * l2_input_acl.c : layer 2 input acl processing
4 * Copyright (c) 2013 Cisco and/or its affiliates.
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at:
9 * http://www.apache.org/licenses/LICENSE-2.0
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
18 #include <vlib/vlib.h>
19 #include <vnet/vnet.h>
20 #include <vnet/pg/pg.h>
21 #include <vnet/ethernet/ethernet.h>
22 #include <vnet/ethernet/packet.h>
23 #include <vnet/ip/ip_packet.h>
24 #include <vnet/ip/ip4_packet.h>
25 #include <vnet/ip/ip6_packet.h>
27 #include <vnet/l2/l2_input.h>
28 #include <vnet/l2/feat_bitmap.h>
30 #include <vppinfra/error.h>
31 #include <vppinfra/hash.h>
32 #include <vppinfra/cache.h>
34 #include <vnet/classify/vnet_classify.h>
35 #include <vnet/classify/input_acl.h>
/* NOTE(review): in upstream VPP these look like members of the module's
 * main struct (l2_inacl_main_t); this chunk shows them at file scope and
 * the enclosing "typedef struct { ... }" lines are not visible — confirm
 * against the full file before relying on their linkage. */
39 // Next nodes for each feature
/* Per-feature next-node index table; sized 32 to match the width of the
 * l2 feature_bitmap consumed by feat_bitmap_get_next_node_index below. */
40 u32 feat_next_node_index[32];
42 /* convenience variables */
/* Cached pointers to the vlib/vnet mains, filled in at init time. */
43 vlib_main_t * vlib_main;
44 vnet_main_t * vnet_main;
54 /* packet trace format function */
/* Renders one l2_inacl_trace_t record for "show trace" output.
 * NOTE(review): the opening brace and the trailing "return s;" / "}" are
 * not visible in this chunk (source lines around 56 and 63-64 appear to
 * be missing from the extraction). */
55 static u8 * format_l2_inacl_trace (u8 * s, va_list * args)
/* vm and node are required by the vlib trace-format calling convention
 * but are unused here, hence CLIB_UNUSED. */
57 CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
58 CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
59 l2_inacl_trace_t * t = va_arg (*args, l2_inacl_trace_t *);
/* One-line summary: rx interface, chosen next node, classify table index,
 * and the matched session entry's offset (fields set in the node below). */
61 s = format (s, "INACL: sw_if_index %d, next_index %d, table %d, offset %d",
62 t->sw_if_index, t->next_index, t->table_index, t->offset);
/* Single module-wide instance; also referenced as msm in the node function. */
66 l2_inacl_main_t l2_inacl_main;
/* Forward declaration so the node function can reference its own
 * registration before VLIB_REGISTER_NODE defines it below. */
68 static vlib_node_registration_t l2_inacl_node;
/* X-macro listing every counter/error this node reports:
 * _(SYMBOL, "human readable counter string"). Expanded twice below. */
70 #define foreach_l2_inacl_error \
71 _(NONE, "valid input ACL packets") \
72 _(MISS, "input ACL misses") \
73 _(HIT, "input ACL hits") \
74 _(CHAIN_HIT, "input ACL hits after chain walk") \
75 _(TABLE_MISS, "input ACL table-miss drops") \
76 _(SESSION_DENY, "input ACL session deny drops")
/* First expansion: L2_INACL_ERROR_* enumerators.
 * NOTE(review): the surrounding "typedef enum { ... } l2_inacl_error_t;"
 * and the "#undef _" lines are not visible in this chunk. */
80 #define _(sym,str) L2_INACL_ERROR_##sym,
81 foreach_l2_inacl_error
/* Second expansion: parallel table of counter strings, indexed by the
 * enum above and handed to the node registration's .error_strings. */
86 static char * l2_inacl_error_strings[] = {
87 #define _(sym,string) string,
88 foreach_l2_inacl_error
/* Packet-processing function for the l2-input-acl node.
 *
 * Structure (two passes over the frame):
 *   pass 1: for each buffer, look up the L2 classify table for the rx
 *           interface, compute the classifier hash, stash hash + table
 *           index in the buffer metadata, and prefetch the hash bucket;
 *   pass 2: walk the frame again, look up the session entry (walking the
 *           next_table_index chain on a miss), pick the next node from the
 *           classify result or the remaining feature bitmap, and set the
 *           per-buffer error (NONE / SESSION_DENY / TABLE_MISS).
 *
 * NOTE(review): this chunk is incomplete — the function's opening brace,
 * the "frame" parameter, declarations of bi0/bi1/h0/h1/hash0/phash1/
 * error0/n_left_to_next, most closing braces, and the counter arguments
 * of the vlib_node_increment_counter calls are all missing from the
 * extraction (gaps in the embedded line numbers). Comments below describe
 * only what the visible lines establish. */
93 l2_inacl_node_fn (vlib_main_t * vm,
94 vlib_node_runtime_t * node,
97 u32 n_left_from, * from, * to_next;
98 acl_next_index_t next_index;
99 l2_inacl_main_t * msm = &l2_inacl_main;
100 input_acl_main_t * am = &input_acl_main;
101 vnet_classify_main_t * vcm = am->vnet_classify_main;
/* This node always uses the L2 slot of the per-interface classify tables. */
102 input_acl_table_id_t tid = INPUT_ACL_TABLE_L2;
/* Timestamp passed to the classifier for session hit accounting. */
103 f64 now = vlib_time_now (vm);
108 from = vlib_frame_vector_args (frame);
109 n_left_from = frame->n_vectors; /* number of packets to process */
110 next_index = node->cached_next_index;
112 /* First pass: compute hashes */
/* Dual-buffer loop: handle b0/b1 while prefetching the pair after next. */
113 while (n_left_from > 2)
115 vlib_buffer_t * b0, * b1;
118 u32 sw_if_index0, sw_if_index1;
119 u32 table_index0, table_index1;
120 vnet_classify_table_t * t0, * t1;
122 /* prefetch next iteration */
124 vlib_buffer_t * p1, * p2;
126 p1 = vlib_get_buffer (vm, from[1]);
127 p2 = vlib_get_buffer (vm, from[2]);
/* STORE prefetch: the classify metadata below is written into the
 * buffer header/data cache lines. */
129 vlib_prefetch_buffer_header (p1, STORE);
130 CLIB_PREFETCH (p1->data, CLIB_CACHE_LINE_BYTES, STORE);
131 vlib_prefetch_buffer_header (p2, STORE);
132 CLIB_PREFETCH (p2->data, CLIB_CACHE_LINE_BYTES, STORE);
136 b0 = vlib_get_buffer (vm, bi0);
140 b1 = vlib_get_buffer (vm, bi1);
/* Classify table is selected per rx interface from the L2 table slot. */
143 sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];
144 table_index0 = am->classify_table_index_by_sw_if_index[tid][sw_if_index0];
146 sw_if_index1 = vnet_buffer (b1)->sw_if_index[VLIB_RX];
147 table_index1 = am->classify_table_index_by_sw_if_index[tid][sw_if_index1];
149 t0 = pool_elt_at_index (vcm->tables, table_index0);
151 t1 = pool_elt_at_index (vcm->tables, table_index1);
/* Stash the hash in buffer metadata and prefetch the bucket so the
 * second pass finds it warm in cache. */
153 vnet_buffer(b0)->l2_classify.hash =
154 vnet_classify_hash_packet (t0, (u8 *) h0);
156 vnet_classify_prefetch_bucket (t0, vnet_buffer(b0)->l2_classify.hash);
158 vnet_buffer(b1)->l2_classify.hash =
159 vnet_classify_hash_packet (t1, (u8 *) h1);
161 vnet_classify_prefetch_bucket (t1, vnet_buffer(b1)->l2_classify.hash);
163 vnet_buffer(b0)->l2_classify.table_index = table_index0;
165 vnet_buffer(b1)->l2_classify.table_index = table_index1;
/* Single-buffer cleanup loop for the 1-2 packets left over from the
 * dual loop above: same hash/table-index stashing, no pair prefetch. */
171 while (n_left_from > 0)
178 vnet_classify_table_t * t0;
181 b0 = vlib_get_buffer (vm, bi0);
184 sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];
185 table_index0 = am->classify_table_index_by_sw_if_index[tid][sw_if_index0];
187 t0 = pool_elt_at_index (vcm->tables, table_index0);
188 vnet_buffer(b0)->l2_classify.hash =
189 vnet_classify_hash_packet (t0, (u8 *) h0);
191 vnet_buffer(b0)->l2_classify.table_index = table_index0;
192 vnet_classify_prefetch_bucket (t0, vnet_buffer(b0)->l2_classify.hash);
/* Second pass: rewind to the start of the frame and do the lookups. */
198 next_index = node->cached_next_index;
199 from = vlib_frame_vector_args (frame);
200 n_left_from = frame->n_vectors;
202 while (n_left_from > 0)
206 vlib_get_next_frame (vm, node, next_index,
207 to_next, n_left_to_next);
209 /* Not enough load/store slots to dual loop... */
210 while (n_left_from > 0 && n_left_to_next > 0)
/* Default disposition is deny; a classify hit may override it. */
214 u32 next0 = ACL_NEXT_INDEX_DENY;
216 vnet_classify_table_t * t0;
217 vnet_classify_entry_t * e0;
222 /* Stride 3 seems to work best */
/* Prefetch the classify entry for the buffer three ahead, using the
 * hash/table-index stashed by pass one. */
223 if (PREDICT_TRUE (n_left_from > 3))
225 vlib_buffer_t * p1 = vlib_get_buffer(vm, from[3]);
226 vnet_classify_table_t * tp1;
230 table_index1 = vnet_buffer(p1)->l2_classify.table_index;
/* ~0 means no L2 classify table is attached to that interface. */
232 if (PREDICT_TRUE (table_index1 != ~0))
234 tp1 = pool_elt_at_index (vcm->tables, table_index1);
235 phash1 = vnet_buffer(p1)->l2_classify.hash;
236 vnet_classify_prefetch_entry (tp1, phash1);
240 /* speculatively enqueue b0 to the current next frame */
248 b0 = vlib_get_buffer (vm, bi0);
250 table_index0 = vnet_buffer(b0)->l2_classify.table_index;
254 /* Feature bitmap update */
/* Clear this node's own feature bit so the dispatch below picks the
 * next enabled L2 input feature. */
255 vnet_buffer(b0)->l2.feature_bitmap &= ~L2INPUT_FEAT_ACL;
257 vnet_buffer(b0)->l2_classify.opaque_index = ~0;
258 /* Determine the next node */
259 next0 = feat_bitmap_get_next_node_index(msm->feat_next_node_index,
260 vnet_buffer(b0)->l2.feature_bitmap);
/* Only classify when a table is attached; otherwise the feature
 * bitmap result above stands. */
262 if (PREDICT_TRUE(table_index0 != ~0))
264 hash0 = vnet_buffer(b0)->l2_classify.hash;
265 t0 = pool_elt_at_index (vcm->tables, table_index0);
267 e0 = vnet_classify_find_entry (t0, (u8 *) h0, hash0,
/* Hit in the first table: record opaque index, apply the entry's
 * advance, and honor its next_index if it is a valid disposition. */
271 vnet_buffer(b0)->l2_classify.opaque_index
273 vlib_buffer_advance (b0, e0->advance);
275 next0 = (e0->next_index < ACL_NEXT_INDEX_N_NEXT)?
276 e0->next_index:next0;
/* Still DENY after the hit => the session itself denies. */
280 error0 = (next0 == ACL_NEXT_INDEX_DENY)?
281 L2_INACL_ERROR_SESSION_DENY:L2_INACL_ERROR_NONE;
282 b0->error = node->errors[error0];
/* Miss path: walk the chained tables via next_table_index.
 * NOTE(review): the enclosing else/loop braces are not visible here. */
288 if (PREDICT_TRUE(t0->next_table_index != ~0))
289 t0 = pool_elt_at_index (vcm->tables,
290 t0->next_table_index);
/* End of chain: fall back to the table's miss_next_index. */
293 next0 = (t0->miss_next_index < ACL_NEXT_INDEX_N_NEXT)?
294 t0->miss_next_index:next0;
298 error0 = (next0 == ACL_NEXT_INDEX_DENY)?
299 L2_INACL_ERROR_TABLE_MISS:L2_INACL_ERROR_NONE;
300 b0->error = node->errors[error0];
/* Chained table: rehash for the new table's mask and retry lookup. */
304 hash0 = vnet_classify_hash_packet (t0, (u8 *) h0);
305 e0 = vnet_classify_find_entry
306 (t0, (u8 *) h0, hash0, now);
/* Hit after the chain walk. */
309 vlib_buffer_advance (b0, e0->advance);
310 next0 = (e0->next_index < ACL_NEXT_INDEX_N_NEXT)?
311 e0->next_index:next0;
315 error0 = (next0 == ACL_NEXT_INDEX_DENY)?
316 L2_INACL_ERROR_SESSION_DENY:L2_INACL_ERROR_NONE;
317 b0->error = node->errors[error0];
/* Record a trace entry when tracing is enabled for this buffer;
 * fields match format_l2_inacl_trace above. */
324 if (PREDICT_FALSE((node->flags & VLIB_NODE_FLAG_TRACE)
325 && (b0->flags & VLIB_BUFFER_IS_TRACED)))
327 l2_inacl_trace_t *t =
328 vlib_add_trace (vm, node, b0, sizeof (*t));
329 t->sw_if_index = vnet_buffer(b0)->sw_if_index[VLIB_RX];
330 t->next_index = next0;
331 t->table_index = t0 ? t0 - vcm->tables : ~0;
332 t->offset = e0 ? vnet_classify_get_offset (t0, e0): ~0;
335 /* verify speculative enqueue, maybe switch current next frame */
336 vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
337 to_next, n_left_to_next,
341 vlib_put_next_frame (vm, node, next_index, n_left_to_next);
/* Bump node counters. NOTE(review): the counter-id and count arguments
 * of the first two calls are missing from this extraction; only the
 * CHAIN_HIT id of the third call is visible. */
344 vlib_node_increment_counter (vm, node->node_index,
347 vlib_node_increment_counter (vm, node->node_index,
350 vlib_node_increment_counter (vm, node->node_index,
351 L2_INACL_ERROR_CHAIN_HIT,
353 return frame->n_vectors;
/* Graph-node registration for "l2-input-acl".
 * NOTE(review): the closing of the .next_nodes initializer and of the
 * registration struct itself is not visible in this chunk. */
356 VLIB_REGISTER_NODE (l2_inacl_node,static) = {
357 .function = l2_inacl_node_fn,
358 .name = "l2-input-acl",
359 .vector_size = sizeof (u32),
360 .format_trace = format_l2_inacl_trace,
361 .type = VLIB_NODE_TYPE_INTERNAL,
/* Error counters come from the X-macro expansion above. */
363 .n_errors = ARRAY_LEN(l2_inacl_error_strings),
364 .error_strings = l2_inacl_error_strings,
366 .n_next_nodes = ACL_NEXT_INDEX_N_NEXT,
368 /* edit / add dispositions here */
/* Denied packets are dropped via the error-drop node. */
370 [ACL_NEXT_INDEX_DENY] = "error-drop",
/* Generate per-architecture (multiarch) variants of the node function. */
374 VLIB_NODE_FUNCTION_MULTIARCH (l2_inacl_node, l2_inacl_node_fn)
/* Module init: cache the vnet main pointer and resolve the per-feature
 * next-node index table used by l2_inacl_node_fn.
 * NOTE(review): the opening brace, the vlib_main assignment, the first
 * arguments of feat_bitmap_init_next_nodes, and the return statement are
 * not visible in this chunk. */
376 clib_error_t *l2_inacl_init (vlib_main_t *vm)
378 l2_inacl_main_t * mp = &l2_inacl_main;
381 mp->vnet_main = vnet_get_main();
383 // Initialize the feature next-node indexes
384 feat_bitmap_init_next_nodes(vm,
387 l2input_get_feat_names(),
388 mp->feat_next_node_index);
/* Run at vlib startup. */
393 VLIB_INIT_FUNCTION (l2_inacl_init);