2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
15 #include <vnet/ip/ip.h>
16 #include <vnet/classify/vnet_classify.h>
17 #include <vnet/classify/in_out_acl.h>
/* Closing line of the per-packet trace record typedef.  NOTE(review): the
 * struct body is on lines missing from this chunk; the formatter below reads
 * fields sw_if_index, next_index, table_index and offset from it. */
25 } ip_in_out_acl_trace_t;
27 /* packet trace format function */
/* Shared trace formatter for both directions: prints an "INACL"/"OUTACL"
 * prefix (selected by is_output) followed by the interface, chosen next
 * index, classify table index and session offset recorded in the trace.
 * NOTE(review): the opening brace and final return of this function fall on
 * lines not visible in this chunk. */
29 format_ip_in_out_acl_trace (u8 * s, u32 is_output, va_list * args)
31 CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
32 CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
33 ip_in_out_acl_trace_t *t = va_arg (*args, ip_in_out_acl_trace_t *);
35 s = format (s, "%s: sw_if_index %d, next_index %d, table %d, offset %d",
36 is_output ? "OUTACL" : "INACL",
37 t->sw_if_index, t->next_index, t->table_index, t->offset);
/* Trace formatter for the input-ACL nodes: delegates to the shared
 * formatter with is_output = 0 ("INACL" prefix). */
42 format_ip_inacl_trace (u8 * s, va_list * args)
44 return format_ip_in_out_acl_trace (s, 0 /* is_output */ , args);
/* Trace formatter for the output-ACL nodes: delegates to the shared
 * formatter with is_output = 1 ("OUTACL" prefix). */
48 format_ip_outacl_trace (u8 * s, va_list * args)
50 return format_ip_in_out_acl_trace (s, 1 /* is_output */ , args);
/* Forward declarations of the four graph-node registrations defined later
 * in this file (IPv4/IPv6 x input/output ACL). */
53 vlib_node_registration_t ip4_inacl_node;
54 vlib_node_registration_t ip4_outacl_node;
55 vlib_node_registration_t ip6_inacl_node;
56 vlib_node_registration_t ip6_outacl_node;
/* X-macro list of input-ACL counters: expanded below into both the
 * IP_INACL_ERROR_* enum and the matching counter-name strings. */
58 #define foreach_ip_inacl_error \
59 _(MISS, "input ACL misses") \
60 _(HIT, "input ACL hits") \
61 _(CHAIN_HIT, "input ACL hits after chain walk")
/* X-macro list of output-ACL counters: expanded below into both the
 * IP_OUTACL_ERROR_* enum and the matching counter-name strings. */
63 #define foreach_ip_outacl_error \
64 _(MISS, "output ACL misses") \
65 _(HIT, "output ACL hits") \
66 _(CHAIN_HIT, "output ACL hits after chain walk")
/* Expand the X-macro lists into the error enums and the counter-name string
 * arrays referenced by the node registrations below.  NOTE(review): the
 * enum typedef headers, closing braces and #undef lines fall on lines not
 * visible in this chunk. */
70 #define _(sym,str) IP_INACL_ERROR_##sym,
71 foreach_ip_inacl_error
76 static char *ip_inacl_error_strings[] = {
77 #define _(sym,string) string,
78 foreach_ip_inacl_error
84 #define _(sym,str) IP_OUTACL_ERROR_##sym,
85 foreach_ip_outacl_error
90 static char *ip_outacl_error_strings[] = {
91 #define _(sym,string) string,
92 foreach_ip_outacl_error
/*
 * Shared worker for all four ACL nodes (ip4/ip6 x input/output), selected
 * by the compile-time flags is_ip4 and is_output.
 *
 * Structure (as visible here): a first pass over the frame computes the
 * classify hash per buffer and prefetches the classify bucket, then a
 * second pass looks each packet up in its classify table (walking chained
 * tables on a miss), decides next0 / error0, applies session actions,
 * records traces, and bumps the miss/hit/chain-hit counters.
 *
 * NOTE(review): many lines of this function (declarations, braces,
 * if/else skeleton, counter accumulation) are missing from this chunk;
 * comments below describe only what is visible.
 */
97 ip_in_out_acl_inline (vlib_main_t * vm,
98 vlib_node_runtime_t * node, vlib_frame_t * frame,
99 int is_ip4, int is_output)
101 u32 n_left_from, *from, *to_next;
102 acl_next_index_t next_index;
103 in_out_acl_main_t *am = &in_out_acl_main;
104 vnet_classify_main_t *vcm = am->vnet_classify_main;
105 f64 now = vlib_time_now (vm);
109 in_out_acl_table_id_t tid;
110 vlib_node_runtime_t *error_node;
113 n_next_nodes = node->n_next_nodes;
/* Select the table id and the node whose error counters receive the
 * per-packet deny/miss errors, per address family. */
117 tid = IN_OUT_ACL_TABLE_IP4;
118 error_node = vlib_node_get_runtime (vm, ip4_input_node.index);
122 tid = IN_OUT_ACL_TABLE_IP6;
123 error_node = vlib_node_get_runtime (vm, ip6_input_node.index);
126 from = vlib_frame_vector_args (frame);
127 n_left_from = frame->n_vectors;
129 /* First pass: compute hashes */
/* Dual-buffer loop: hash b0/b1 while prefetching the two buffers after
 * them; direction (VLIB_RX vs VLIB_TX) selects which sw_if_index and
 * which per-interface classify table to use. */
131 while (n_left_from > 2)
133 vlib_buffer_t *b0, *b1;
136 u32 sw_if_index0, sw_if_index1;
137 u32 table_index0, table_index1;
138 vnet_classify_table_t *t0, *t1;
140 /* prefetch next iteration */
142 vlib_buffer_t *p1, *p2;
144 p1 = vlib_get_buffer (vm, from[1]);
145 p2 = vlib_get_buffer (vm, from[2]);
147 vlib_prefetch_buffer_header (p1, STORE);
148 CLIB_PREFETCH (p1->data, CLIB_CACHE_LINE_BYTES, STORE);
149 vlib_prefetch_buffer_header (p2, STORE);
150 CLIB_PREFETCH (p2->data, CLIB_CACHE_LINE_BYTES, STORE);
154 b0 = vlib_get_buffer (vm, bi0);
157 b1 = vlib_get_buffer (vm, bi1);
160 vnet_buffer (b0)->sw_if_index[is_output ? VLIB_TX : VLIB_RX];
162 am->classify_table_index_by_sw_if_index[is_output][tid][sw_if_index0];
165 vnet_buffer (b1)->sw_if_index[is_output ? VLIB_TX : VLIB_RX];
167 am->classify_table_index_by_sw_if_index[is_output][tid][sw_if_index1];
169 t0 = pool_elt_at_index (vcm->tables, table_index0);
171 t1 = pool_elt_at_index (vcm->tables, table_index1);
/* Match pointer h0: either current_data_offset-relative, or (on the
 * missing else-branch) presumably the buffer data start — confirm. */
173 if (t0->current_data_flag == CLASSIFY_FLAG_USE_CURR_DATA)
174 h0 = (void *) vlib_buffer_get_current (b0) + t0->current_data_offset;
180 /* Save the rewrite length, since we are using the l2_classify struct */
181 vnet_buffer (b0)->l2_classify.pad.l2_len =
182 vnet_buffer (b0)->ip.save_rewrite_length;
183 /* advance the match pointer so the matching happens on IP header */
184 h0 += vnet_buffer (b0)->l2_classify.pad.l2_len;
187 vnet_buffer (b0)->l2_classify.hash =
188 vnet_classify_hash_packet (t0, (u8 *) h0);
/* Prefetch the hash bucket so the second-pass lookup hits warm cache. */
190 vnet_classify_prefetch_bucket (t0, vnet_buffer (b0)->l2_classify.hash);
192 if (t1->current_data_flag == CLASSIFY_FLAG_USE_CURR_DATA)
193 h1 = (void *) vlib_buffer_get_current (b1) + t1->current_data_offset;
199 /* Save the rewrite length, since we are using the l2_classify struct */
200 vnet_buffer (b1)->l2_classify.pad.l2_len =
201 vnet_buffer (b1)->ip.save_rewrite_length;
202 /* advance the match pointer so the matching happens on IP header */
203 h1 += vnet_buffer (b1)->l2_classify.pad.l2_len;
206 vnet_buffer (b1)->l2_classify.hash =
207 vnet_classify_hash_packet (t1, (u8 *) h1);
209 vnet_classify_prefetch_bucket (t1, vnet_buffer (b1)->l2_classify.hash);
/* Stash the table index in buffer metadata for the second pass. */
211 vnet_buffer (b0)->l2_classify.table_index = table_index0;
213 vnet_buffer (b1)->l2_classify.table_index = table_index1;
/* Single-buffer tail of the first pass: same hashing as above, minus the
 * prefetch of upcoming buffers. */
219 while (n_left_from > 0)
226 vnet_classify_table_t *t0;
229 b0 = vlib_get_buffer (vm, bi0);
232 vnet_buffer (b0)->sw_if_index[is_output ? VLIB_TX : VLIB_RX];
234 am->classify_table_index_by_sw_if_index[is_output][tid][sw_if_index0];
236 t0 = pool_elt_at_index (vcm->tables, table_index0);
238 if (t0->current_data_flag == CLASSIFY_FLAG_USE_CURR_DATA)
239 h0 = (void *) vlib_buffer_get_current (b0) + t0->current_data_offset;
245 /* Save the rewrite length, since we are using the l2_classify struct */
246 vnet_buffer (b0)->l2_classify.pad.l2_len =
247 vnet_buffer (b0)->ip.save_rewrite_length;
248 /* advance the match pointer so the matching happens on IP header */
249 h0 += vnet_buffer (b0)->l2_classify.pad.l2_len;
252 vnet_buffer (b0)->l2_classify.hash =
253 vnet_classify_hash_packet (t0, (u8 *) h0);
255 vnet_buffer (b0)->l2_classify.table_index = table_index0;
256 vnet_classify_prefetch_bucket (t0, vnet_buffer (b0)->l2_classify.hash);
/* Second pass: re-walk the same frame, look each packet up using the
 * precomputed hash, and enqueue to the chosen next node. */
262 next_index = node->cached_next_index;
263 from = vlib_frame_vector_args (frame);
264 n_left_from = frame->n_vectors;
266 while (n_left_from > 0)
270 vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
272 /* Not enough load/store slots to dual loop... */
273 while (n_left_from > 0 && n_left_to_next > 0)
277 u32 next0 = ACL_NEXT_INDEX_DENY;
279 vnet_classify_table_t *t0;
280 vnet_classify_entry_t *e0;
285 /* Stride 3 seems to work best */
/* Prefetch the classify entry three packets ahead of the cursor. */
286 if (PREDICT_TRUE (n_left_from > 3))
288 vlib_buffer_t *p1 = vlib_get_buffer (vm, from[3]);
289 vnet_classify_table_t *tp1;
293 table_index1 = vnet_buffer (p1)->l2_classify.table_index;
295 if (PREDICT_TRUE (table_index1 != ~0))
297 tp1 = pool_elt_at_index (vcm->tables, table_index1);
298 phash1 = vnet_buffer (p1)->l2_classify.hash;
299 vnet_classify_prefetch_entry (tp1, phash1);
304 /* speculatively enqueue b0 to the current next frame */
312 b0 = vlib_get_buffer (vm, bi0);
313 table_index0 = vnet_buffer (b0)->l2_classify.table_index;
/* Default next0 comes from the interface's feature config chain. */
316 vnet_get_config_data (am->vnet_config_main[is_output][tid],
317 &b0->current_config_index, &next0,
318 /* # bytes of config data */ 0);
320 vnet_buffer (b0)->l2_classify.opaque_index = ~0;
/* table_index0 == ~0 means no classify table attached: keep next0. */
322 if (PREDICT_TRUE (table_index0 != ~0))
324 hash0 = vnet_buffer (b0)->l2_classify.hash;
325 t0 = pool_elt_at_index (vcm->tables, table_index0);
327 if (t0->current_data_flag == CLASSIFY_FLAG_USE_CURR_DATA)
329 (void *) vlib_buffer_get_current (b0) +
330 t0->current_data_offset;
334 /* advance the match pointer so the matching happens on IP header */
336 h0 += vnet_buffer (b0)->l2_classify.pad.l2_len;
/* Session lookup; `now` lets the classifier age/update entries. */
338 e0 = vnet_classify_find_entry (t0, (u8 *) h0, hash0, now);
/* Hit: record the opaque index, apply the entry's advance, and take
 * its next_index when it targets a valid next node. */
341 vnet_buffer (b0)->l2_classify.opaque_index
343 vlib_buffer_advance (b0, e0->advance);
345 next0 = (e0->next_index < n_next_nodes) ?
346 e0->next_index : next0;
351 error0 = (next0 == ACL_NEXT_INDEX_DENY) ?
352 (is_output ? IP4_ERROR_OUTACL_SESSION_DENY :
353 IP4_ERROR_INACL_SESSION_DENY) : IP4_ERROR_NONE;
355 error0 = (next0 == ACL_NEXT_INDEX_DENY) ?
356 (is_output ? IP6_ERROR_OUTACL_SESSION_DENY :
357 IP6_ERROR_INACL_SESSION_DENY) : IP6_ERROR_NONE;
358 b0->error = error_node->errors[error0];
/* Session actions: set-FIB stores the entry metadata as the TX
 * sw_if_index; set-metadata stores it in the TX adjacency slot. */
362 if (e0->action == CLASSIFY_ACTION_SET_IP4_FIB_INDEX ||
363 e0->action == CLASSIFY_ACTION_SET_IP6_FIB_INDEX)
364 vnet_buffer (b0)->sw_if_index[VLIB_TX] = e0->metadata;
365 else if (e0->action == CLASSIFY_ACTION_SET_METADATA)
366 vnet_buffer (b0)->ip.adj_index[VLIB_TX] =
/* Miss: walk the chain of linked tables (next_table_index) until a
 * hit or the end of the chain; at the end, fall back to the last
 * table's miss_next_index. */
374 if (PREDICT_TRUE (t0->next_table_index != ~0))
375 t0 = pool_elt_at_index (vcm->tables,
376 t0->next_table_index);
379 next0 = (t0->miss_next_index < n_next_nodes) ?
380 t0->miss_next_index : next0;
385 error0 = (next0 == ACL_NEXT_INDEX_DENY) ?
386 (is_output ? IP4_ERROR_OUTACL_TABLE_MISS :
387 IP4_ERROR_INACL_TABLE_MISS) : IP4_ERROR_NONE;
389 error0 = (next0 == ACL_NEXT_INDEX_DENY) ?
390 (is_output ? IP6_ERROR_OUTACL_TABLE_MISS :
391 IP6_ERROR_INACL_TABLE_MISS) : IP6_ERROR_NONE;
392 b0->error = error_node->errors[error0];
/* Re-hash and re-lookup against the chained table (its mask and
 * current-data settings may differ from the first table's). */
396 if (t0->current_data_flag ==
397 CLASSIFY_FLAG_USE_CURR_DATA)
399 (void *) vlib_buffer_get_current (b0) +
400 t0->current_data_offset;
404 hash0 = vnet_classify_hash_packet (t0, (u8 *) h0);
405 e0 = vnet_classify_find_entry
406 (t0, (u8 *) h0, hash0, now);
409 vnet_buffer (b0)->l2_classify.opaque_index
411 vlib_buffer_advance (b0, e0->advance);
412 next0 = (e0->next_index < n_next_nodes) ?
413 e0->next_index : next0;
418 error0 = (next0 == ACL_NEXT_INDEX_DENY) ?
419 (is_output ? IP4_ERROR_OUTACL_SESSION_DENY :
420 IP4_ERROR_INACL_SESSION_DENY) : IP4_ERROR_NONE;
422 error0 = (next0 == ACL_NEXT_INDEX_DENY) ?
423 (is_output ? IP6_ERROR_OUTACL_SESSION_DENY :
424 IP6_ERROR_INACL_SESSION_DENY) : IP6_ERROR_NONE;
425 b0->error = error_node->errors[error0];
427 if (e0->action == CLASSIFY_ACTION_SET_IP4_FIB_INDEX
429 CLASSIFY_ACTION_SET_IP6_FIB_INDEX)
430 vnet_buffer (b0)->sw_if_index[VLIB_TX] =
/* Optional per-packet trace: records interface, chosen next node,
 * the (possibly chained) table, and the matched session offset. */
438 if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)
439 && (b0->flags & VLIB_BUFFER_IS_TRACED)))
441 ip_in_out_acl_trace_t *t =
442 vlib_add_trace (vm, node, b0, sizeof (*t));
444 vnet_buffer (b0)->sw_if_index[is_output ? VLIB_TX : VLIB_RX];
445 t->next_index = next0;
446 t->table_index = t0 ? t0 - vcm->tables : ~0;
447 t->offset = (e0 && t0) ? vnet_classify_get_offset (t0, e0) : ~0;
450 if ((next0 == ACL_NEXT_INDEX_DENY) && is_output)
452 /* on output, for the drop node to work properly, go back to ip header */
453 vlib_buffer_advance (b0, vnet_buffer (b0)->l2.l2_len);
456 /* verify speculative enqueue, maybe switch current next frame */
457 vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
458 to_next, n_left_to_next,
462 vlib_put_next_frame (vm, node, next_index, n_left_to_next);
/* Publish the per-frame miss/hit/chain-hit totals to the direction's
 * error counters. */
465 vlib_node_increment_counter (vm, node->node_index,
466 is_output ? IP_OUTACL_ERROR_MISS :
467 IP_INACL_ERROR_MISS, misses);
468 vlib_node_increment_counter (vm, node->node_index,
469 is_output ? IP_OUTACL_ERROR_HIT :
470 IP_INACL_ERROR_HIT, hits);
471 vlib_node_increment_counter (vm, node->node_index,
472 is_output ? IP_OUTACL_ERROR_CHAIN_HIT :
473 IP_INACL_ERROR_CHAIN_HIT, chain_hits);
474 return frame->n_vectors;
/* Node function for the IPv4 input ACL: dispatches to the shared worker
 * with is_ip4 = 1.  NOTE(review): the is_output argument and the closing of
 * this call fall on lines not visible in this chunk. */
478 ip4_inacl (vlib_main_t * vm, vlib_node_runtime_t * node, vlib_frame_t * frame)
480 return ip_in_out_acl_inline (vm, node, frame, 1 /* is_ip4 */ ,
/* Node function for the IPv4 output ACL: dispatches to the shared worker
 * with is_ip4 = 1.  NOTE(review): the is_output argument and the closing of
 * this call fall on lines not visible in this chunk. */
485 ip4_outacl (vlib_main_t * vm, vlib_node_runtime_t * node,
486 	    vlib_frame_t * frame)
488 return ip_in_out_acl_inline (vm, node, frame, 1 /* is_ip4 */ ,
/* Graph-node registration for the IPv4 input ACL; the deny next index
 * points at "ip4-drop".  NOTE(review): the .name field and the closing
 * braces fall on lines not visible in this chunk. */
494 VLIB_REGISTER_NODE (ip4_inacl_node) = {
495 .function = ip4_inacl,
497 .vector_size = sizeof (u32),
498 .format_trace = format_ip_inacl_trace,
499 .n_errors = ARRAY_LEN(ip_inacl_error_strings),
500 .error_strings = ip_inacl_error_strings,
502 .n_next_nodes = ACL_NEXT_INDEX_N_NEXT,
504 [ACL_NEXT_INDEX_DENY] = "ip4-drop",
/* Graph-node registration for "ip4-outacl"; the deny next index points at
 * "ip4-drop".  NOTE(review): the closing braces fall on lines not visible
 * in this chunk. */
508 VLIB_REGISTER_NODE (ip4_outacl_node) = {
509 .function = ip4_outacl,
510 .name = "ip4-outacl",
511 .vector_size = sizeof (u32),
512 .format_trace = format_ip_outacl_trace,
513 .n_errors = ARRAY_LEN(ip_outacl_error_strings),
514 .error_strings = ip_outacl_error_strings,
516 .n_next_nodes = ACL_NEXT_INDEX_N_NEXT,
518 [ACL_NEXT_INDEX_DENY] = "ip4-drop",
/* Generate per-CPU-architecture variants of the IPv4 ACL node functions. */
523 VLIB_NODE_FUNCTION_MULTIARCH (ip4_inacl_node, ip4_inacl);
524 VLIB_NODE_FUNCTION_MULTIARCH (ip4_outacl_node, ip4_outacl);
/* Node function for the IPv6 input ACL: dispatches to the shared worker
 * with is_ip4 = 0.  NOTE(review): the is_output argument and the closing of
 * this call fall on lines not visible in this chunk. */
527 ip6_inacl (vlib_main_t * vm, vlib_node_runtime_t * node, vlib_frame_t * frame)
529 return ip_in_out_acl_inline (vm, node, frame, 0 /* is_ip4 */ ,
/* Node function for the IPv6 output ACL: dispatches to the shared worker
 * with is_ip4 = 0.  NOTE(review): the is_output argument and the closing of
 * this call fall on lines not visible in this chunk. */
534 ip6_outacl (vlib_main_t * vm, vlib_node_runtime_t * node,
535 	    vlib_frame_t * frame)
537 return ip_in_out_acl_inline (vm, node, frame, 0 /* is_ip4 */ ,
/* Graph-node registration for the IPv6 input ACL; the deny next index
 * points at "ip6-drop".  NOTE(review): the .name field and the closing
 * braces fall on lines not visible in this chunk. */
542 VLIB_REGISTER_NODE (ip6_inacl_node) = {
543 .function = ip6_inacl,
545 .vector_size = sizeof (u32),
546 .format_trace = format_ip_inacl_trace,
547 .n_errors = ARRAY_LEN(ip_inacl_error_strings),
548 .error_strings = ip_inacl_error_strings,
550 .n_next_nodes = ACL_NEXT_INDEX_N_NEXT,
552 [ACL_NEXT_INDEX_DENY] = "ip6-drop",
/* Graph-node registration for "ip6-outacl"; the deny next index points at
 * "ip6-drop".  NOTE(review): the closing braces fall on lines not visible
 * in this chunk. */
556 VLIB_REGISTER_NODE (ip6_outacl_node) = {
557 .function = ip6_outacl,
558 .name = "ip6-outacl",
559 .vector_size = sizeof (u32),
560 .format_trace = format_ip_outacl_trace,
561 .n_errors = ARRAY_LEN(ip_outacl_error_strings),
562 .error_strings = ip_outacl_error_strings,
564 .n_next_nodes = ACL_NEXT_INDEX_N_NEXT,
566 [ACL_NEXT_INDEX_DENY] = "ip6-drop",
/* Generate per-CPU-architecture variants of the IPv6 ACL node functions. */
571 VLIB_NODE_FUNCTION_MULTIARCH (ip6_inacl_node, ip6_inacl);
572 VLIB_NODE_FUNCTION_MULTIARCH (ip6_outacl_node, ip6_outacl);
/* Module init function, registered with vlib below.  NOTE(review): the
 * function body (original lines 576-579) is not visible in this chunk. */
574 static clib_error_t *
575 ip_in_out_acl_init (vlib_main_t * vm)
580 VLIB_INIT_FUNCTION (ip_in_out_acl_init);
584 * fd.io coding-style-patch-verification: ON
587 * eval: (c-set-style "gnu")