2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
15 #include <vnet/ip/ip.h>
16 #include <vnet/classify/vnet_classify.h>
17 #include <vnet/classify/in_out_acl.h>
/* Per-packet trace record for the IN/OUT ACL nodes.
   NOTE(review): the struct body is missing from this extract -- the format
   function below reads sw_if_index, next_index, table_index and offset
   fields, so those presumably live here.  Restore from version control
   before editing. */
26 ip_in_out_acl_trace_t;
28 /* packet trace format function */
/* Shared trace formatter: prints "INACL"/"OUTACL" plus the interface,
   chosen next index, classify table index and session offset recorded in
   the trace struct.  NOTE(review): the return-type line, braces and the
   trailing `return s;` are missing from this extract. */
30 format_ip_in_out_acl_trace (u8 * s, u32 is_output, va_list * args)
32 CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
33 CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
34 ip_in_out_acl_trace_t *t = va_arg (*args, ip_in_out_acl_trace_t *);
36 s = format (s, "%s: sw_if_index %d, next_index %d, table %d, offset %d",
37 is_output ? "OUTACL" : "INACL",
38 t->sw_if_index, t->next_index, t->table_index, t->offset);
/* Input-ACL trace formatter: delegates with is_output = 0. */
43 format_ip_inacl_trace (u8 * s, va_list * args)
45 return format_ip_in_out_acl_trace (s, 0 /* is_output */ , args);
/* Output-ACL trace formatter: delegates with is_output = 1. */
49 format_ip_outacl_trace (u8 * s, va_list * args)
51 return format_ip_in_out_acl_trace (s, 1 /* is_output */ , args);
54 extern vlib_node_registration_t ip4_inacl_node;
55 extern vlib_node_registration_t ip4_outacl_node;
56 extern vlib_node_registration_t ip6_inacl_node;
57 extern vlib_node_registration_t ip6_outacl_node;
/* X-macro list of input-ACL counters; expanded below into both the
   IP_INACL_ERROR_* enum and the matching error-string table. */
59 #define foreach_ip_inacl_error \
60 _(MISS, "input ACL misses") \
61 _(HIT, "input ACL hits") \
62 _(CHAIN_HIT, "input ACL hits after chain walk")
/* X-macro list of output-ACL counters; mirrors foreach_ip_inacl_error. */
64 #define foreach_ip_outacl_error \
65 _(MISS, "output ACL misses") \
66 _(HIT, "output ACL hits") \
67 _(CHAIN_HIT, "output ACL hits after chain walk")
/* Expand the X-macro into IP_INACL_ERROR_* enum constants.
   NOTE(review): the enclosing `typedef enum { ... }` wrapper and the
   `#undef _` appear to be missing from this extract. */
71 #define _(sym,str) IP_INACL_ERROR_##sym,
72 foreach_ip_inacl_error
/* Human-readable counter names, index-aligned with IP_INACL_ERROR_*.
   NOTE(review): the closing `};` and `#undef _` are missing here. */
78 static char *ip_inacl_error_strings[] = {
79 #define _(sym,string) string,
80 foreach_ip_inacl_error
/* Expand the X-macro into IP_OUTACL_ERROR_* enum constants.
   NOTE(review): enum wrapper and `#undef _` missing from this extract. */
86 #define _(sym,str) IP_OUTACL_ERROR_##sym,
87 foreach_ip_outacl_error
/* Human-readable counter names, index-aligned with IP_OUTACL_ERROR_*.
   NOTE(review): the closing `};` and `#undef _` are missing here. */
93 static char *ip_outacl_error_strings[] = {
94 #define _(sym,string) string,
95 foreach_ip_outacl_error
/*
 * Core classify loop shared by the four ip4/ip6 in/out ACL nodes, inlined
 * with compile-time is_ip4 / is_output specialization (and, judging by the
 * callers at the bottom of the file, a trailing do_trace parameter whose
 * declaration line is missing from this extract).  For each buffer it
 * resolves the per-interface classify table, hashes the packet, looks up a
 * session entry, walks chained tables on a miss, and selects the next node
 * plus a per-packet error counter.
 *
 * NOTE(review): this extract is damaged -- every line carries a stale
 * line-number prefix and many lines are missing (braces, loop headers,
 * declarations for sw_if_index[], table_index[], hash[], h[], error[],
 * misses/hits/chain_hits accumulators, etc.).  Do not hand-edit this
 * function; restore the file from version control first.
 */
99 static_always_inline void
100 ip_in_out_acl_inline (vlib_main_t * vm,
101 vlib_node_runtime_t * node, vlib_buffer_t ** b,
102 u16 * next, u32 n_left, int is_ip4, int is_output,
105 in_out_acl_main_t *am = &in_out_acl_main;
106 vnet_classify_main_t *vcm = am->vnet_classify_main;
107 f64 now = vlib_time_now (vm);
111 in_out_acl_table_id_t tid;
112 vlib_node_runtime_t *error_node;
118 vnet_classify_table_t *t[4] = { 0, 0 };
121 n_next_nodes = node->n_next_nodes;
/* Select the v4 or v6 table id and the input node whose error strings are
   used for per-buffer drop reasons. */
125 tid = IN_OUT_ACL_TABLE_IP4;
126 error_node = vlib_node_get_runtime (vm, ip4_input_node.index);
130 tid = IN_OUT_ACL_TABLE_IP6;
131 error_node = vlib_node_get_runtime (vm, ip6_input_node.index);
134 /* calculate hashes for b[0] & b[1] */
138 vnet_buffer (b[0])->sw_if_index[is_output ? VLIB_TX : VLIB_RX];
140 vnet_buffer (b[1])->sw_if_index[is_output ? VLIB_TX : VLIB_RX];
143 am->classify_table_index_by_sw_if_index[is_output][tid]
146 am->classify_table_index_by_sw_if_index[is_output][tid]
149 t[2] = pool_elt_at_index (vcm->tables, table_index[2]);
150 t[3] = pool_elt_at_index (vcm->tables, table_index[3]);
/* When the table wants to match at current_data, point the match pointer
   there (plus the table's configured offset). */
152 if (t[2]->current_data_flag == CLASSIFY_FLAG_USE_CURR_DATA)
154 (void *) vlib_buffer_get_current (b[0]) + t[2]->current_data_offset;
158 if (t[3]->current_data_flag == CLASSIFY_FLAG_USE_CURR_DATA)
160 (void *) vlib_buffer_get_current (b[1]) + t[3]->current_data_offset;
166 /* Save the rewrite length, since we are using the l2_classify struct */
167 vnet_buffer (b[0])->l2_classify.pad.l2_len =
168 vnet_buffer (b[0])->ip.save_rewrite_length;
169 /* advance the match pointer so the matching happens on IP header */
170 h[2] += vnet_buffer (b[0])->l2_classify.pad.l2_len;
172 /* Save the rewrite length, since we are using the l2_classify struct */
173 vnet_buffer (b[1])->l2_classify.pad.l2_len =
174 vnet_buffer (b[1])->ip.save_rewrite_length;
175 /* advance the match pointer so the matching happens on IP header */
176 h[3] += vnet_buffer (b[1])->l2_classify.pad.l2_len;
/* Pre-compute hashes and stash lookup state in buffer metadata so the
   lookup a few packets later hits warm caches. */
179 hash[2] = vnet_classify_hash_packet_inline (t[2], (u8 *) h[2]);
180 hash[3] = vnet_classify_hash_packet_inline (t[3], (u8 *) h[3]);
182 vnet_buffer (b[0])->l2_classify.hash = hash[2];
183 vnet_buffer (b[1])->l2_classify.hash = hash[3];
185 vnet_buffer (b[0])->l2_classify.table_index = table_index[2];
186 vnet_buffer (b[1])->l2_classify.table_index = table_index[3];
188 vnet_buffer (b[0])->l2_classify.opaque_index = ~0;
189 vnet_buffer (b[1])->l2_classify.opaque_index = ~0;
191 vnet_classify_prefetch_bucket (t[2],
192 vnet_buffer (b[0])->l2_classify.hash);
193 vnet_classify_prefetch_bucket (t[3],
194 vnet_buffer (b[1])->l2_classify.hash);
/* Dual-packet pipeline body: slots [0]/[1] are the packets being looked
   up this iteration, slots [2]/[3] the ones being hashed for the next. */
199 vnet_classify_entry_t *e[2] = { 0, 0 };
200 u32 _next[2] = { ACL_NEXT_INDEX_DENY, ACL_NEXT_INDEX_DENY };
208 sw_if_index[0] = sw_if_index[2];
209 sw_if_index[1] = sw_if_index[3];
211 table_index[0] = table_index[2];
212 table_index[1] = table_index[3];
217 /* prefetch next iteration */
220 vlib_prefetch_buffer_header (b[4], LOAD);
221 vlib_prefetch_buffer_header (b[5], LOAD);
223 CLIB_PREFETCH (b[4]->data, CLIB_CACHE_LINE_BYTES, LOAD);
224 CLIB_PREFETCH (b[5]->data, CLIB_CACHE_LINE_BYTES, LOAD);
227 /* calculate hashes for b[2] & b[3] */
231 vnet_buffer (b[2])->sw_if_index[is_output ? VLIB_TX : VLIB_RX];
233 vnet_buffer (b[3])->sw_if_index[is_output ? VLIB_TX : VLIB_RX];
236 am->classify_table_index_by_sw_if_index[is_output][tid]
239 am->classify_table_index_by_sw_if_index[is_output][tid]
242 t[2] = pool_elt_at_index (vcm->tables, table_index[2]);
243 t[3] = pool_elt_at_index (vcm->tables, table_index[3]);
245 if (t[2]->current_data_flag == CLASSIFY_FLAG_USE_CURR_DATA)
247 (void *) vlib_buffer_get_current (b[2]) +
248 t[2]->current_data_offset;
252 if (t[3]->current_data_flag == CLASSIFY_FLAG_USE_CURR_DATA)
254 (void *) vlib_buffer_get_current (b[3]) +
255 t[3]->current_data_offset;
261 /* Save the rewrite length, since we are using the l2_classify struct */
262 vnet_buffer (b[2])->l2_classify.pad.l2_len =
263 vnet_buffer (b[2])->ip.save_rewrite_length;
264 /* advance the match pointer so the matching happens on IP header */
265 h[2] += vnet_buffer (b[2])->l2_classify.pad.l2_len;
267 /* Save the rewrite length, since we are using the l2_classify struct */
268 vnet_buffer (b[3])->l2_classify.pad.l2_len =
269 vnet_buffer (b[3])->ip.save_rewrite_length;
270 /* advance the match pointer so the matching happens on IP header */
271 h[3] += vnet_buffer (b[3])->l2_classify.pad.l2_len;
274 hash[2] = vnet_classify_hash_packet_inline (t[2], (u8 *) h[2]);
275 hash[3] = vnet_classify_hash_packet_inline (t[3], (u8 *) h[3]);
277 vnet_buffer (b[2])->l2_classify.hash = hash[2];
278 vnet_buffer (b[3])->l2_classify.hash = hash[3];
280 vnet_buffer (b[2])->l2_classify.table_index = table_index[2];
281 vnet_buffer (b[3])->l2_classify.table_index = table_index[3];
283 vnet_buffer (b[2])->l2_classify.opaque_index = ~0;
284 vnet_buffer (b[3])->l2_classify.opaque_index = ~0;
286 vnet_classify_prefetch_bucket (t[2],
289 vnet_classify_prefetch_bucket (t[3],
294 /* find entry for b[0] & b[1] */
/* Fetch the per-feature default next index from the config chain; a
   session hit or table miss_next_index may override it below. */
295 vnet_get_config_data (am->vnet_config_main[is_output][tid],
296 &b[0]->current_config_index, &_next[0],
297 /* # bytes of config data */ 0);
298 vnet_get_config_data (am->vnet_config_main[is_output][tid],
299 &b[1]->current_config_index, &_next[1],
300 /* # bytes of config data */ 0);
/* b[0]: session lookup in the first table; on a miss, walk chained
   tables via next_table_index, rehashing per table. */
302 if (PREDICT_TRUE (table_index[0] != ~0))
305 vnet_classify_find_entry_inline (t[0], (u8 *) h[0], hash[0], now);
308 vnet_buffer (b[0])->l2_classify.opaque_index
309 = e[0]->opaque_index;
310 vlib_buffer_advance (b[0], e[0]->advance);
312 _next[0] = (e[0]->next_index < n_next_nodes) ?
313 e[0]->next_index : _next[0];
318 error[0] = (_next[0] == ACL_NEXT_INDEX_DENY) ?
319 (is_output ? IP4_ERROR_OUTACL_SESSION_DENY :
320 IP4_ERROR_INACL_SESSION_DENY) : IP4_ERROR_NONE;
322 error[0] = (_next[0] == ACL_NEXT_INDEX_DENY) ?
323 (is_output ? IP6_ERROR_OUTACL_SESSION_DENY :
324 IP6_ERROR_INACL_SESSION_DENY) : IP6_ERROR_NONE;
325 b[0]->error = error_node->errors[error[0]];
/* Session actions: redirect to a FIB (via sw_if_index[VLIB_TX]) or
   attach metadata as the TX adjacency. */
329 if (e[0]->action == CLASSIFY_ACTION_SET_IP4_FIB_INDEX ||
330 e[0]->action == CLASSIFY_ACTION_SET_IP6_FIB_INDEX)
331 vnet_buffer (b[0])->sw_if_index[VLIB_TX] = e[0]->metadata;
332 else if (e[0]->action == CLASSIFY_ACTION_SET_METADATA)
333 vnet_buffer (b[0])->ip.adj_index[VLIB_TX] =
/* Miss path: follow the table chain until a hit or the chain ends. */
341 if (PREDICT_TRUE (t[0]->next_table_index != ~0))
342 t[0] = pool_elt_at_index (vcm->tables,
343 t[0]->next_table_index);
346 _next[0] = (t[0]->miss_next_index < n_next_nodes) ?
347 t[0]->miss_next_index : _next[0];
352 error[0] = (_next[0] == ACL_NEXT_INDEX_DENY) ?
353 (is_output ? IP4_ERROR_OUTACL_TABLE_MISS :
354 IP4_ERROR_INACL_TABLE_MISS) : IP4_ERROR_NONE;
356 error[0] = (_next[0] == ACL_NEXT_INDEX_DENY) ?
357 (is_output ? IP6_ERROR_OUTACL_TABLE_MISS :
358 IP6_ERROR_INACL_TABLE_MISS) : IP6_ERROR_NONE;
359 b[0]->error = error_node->errors[error[0]];
363 if (t[0]->current_data_flag == CLASSIFY_FLAG_USE_CURR_DATA)
365 (void *) vlib_buffer_get_current (b[0]) +
366 t[0]->current_data_offset;
370 /* advance the match pointer so the matching happens on IP header */
372 h[0] += vnet_buffer (b[0])->l2_classify.pad.l2_len;
375 vnet_classify_hash_packet_inline (t[0], (u8 *) h[0]);
377 vnet_classify_find_entry_inline (t[0], (u8 *) h[0],
381 vnet_buffer (b[0])->l2_classify.opaque_index
382 = e[0]->opaque_index;
383 vlib_buffer_advance (b[0], e[0]->advance);
384 _next[0] = (e[0]->next_index < n_next_nodes) ?
385 e[0]->next_index : _next[0];
390 error[0] = (_next[0] == ACL_NEXT_INDEX_DENY) ?
391 (is_output ? IP4_ERROR_OUTACL_SESSION_DENY :
392 IP4_ERROR_INACL_SESSION_DENY) : IP4_ERROR_NONE;
394 error[0] = (_next[0] == ACL_NEXT_INDEX_DENY) ?
395 (is_output ? IP6_ERROR_OUTACL_SESSION_DENY :
396 IP6_ERROR_INACL_SESSION_DENY) : IP6_ERROR_NONE;
397 b[0]->error = error_node->errors[error[0]];
402 CLASSIFY_ACTION_SET_IP4_FIB_INDEX
404 CLASSIFY_ACTION_SET_IP6_FIB_INDEX)
405 vnet_buffer (b[0])->sw_if_index[VLIB_TX] =
407 else if (e[0]->action ==
408 CLASSIFY_ACTION_SET_METADATA)
409 vnet_buffer (b[0])->ip.adj_index[VLIB_TX] =
/* b[1]: identical lookup / chain-walk logic as for b[0]. */
418 if (PREDICT_TRUE (table_index[1] != ~0))
421 vnet_classify_find_entry_inline (t[1], (u8 *) h[1], hash[1], now);
424 vnet_buffer (b[1])->l2_classify.opaque_index
425 = e[1]->opaque_index;
426 vlib_buffer_advance (b[1], e[1]->advance);
428 _next[1] = (e[1]->next_index < n_next_nodes) ?
429 e[1]->next_index : _next[1];
434 error[1] = (_next[1] == ACL_NEXT_INDEX_DENY) ?
435 (is_output ? IP4_ERROR_OUTACL_SESSION_DENY :
436 IP4_ERROR_INACL_SESSION_DENY) : IP4_ERROR_NONE;
438 error[1] = (_next[1] == ACL_NEXT_INDEX_DENY) ?
439 (is_output ? IP6_ERROR_OUTACL_SESSION_DENY :
440 IP6_ERROR_INACL_SESSION_DENY) : IP6_ERROR_NONE;
441 b[1]->error = error_node->errors[error[1]];
445 if (e[1]->action == CLASSIFY_ACTION_SET_IP4_FIB_INDEX ||
446 e[1]->action == CLASSIFY_ACTION_SET_IP6_FIB_INDEX)
447 vnet_buffer (b[1])->sw_if_index[VLIB_TX] = e[1]->metadata;
448 else if (e[1]->action == CLASSIFY_ACTION_SET_METADATA)
449 vnet_buffer (b[1])->ip.adj_index[VLIB_TX] =
457 if (PREDICT_TRUE (t[1]->next_table_index != ~0))
458 t[1] = pool_elt_at_index (vcm->tables,
459 t[1]->next_table_index);
462 _next[1] = (t[1]->miss_next_index < n_next_nodes) ?
463 t[1]->miss_next_index : _next[1];
468 error[1] = (_next[1] == ACL_NEXT_INDEX_DENY) ?
469 (is_output ? IP4_ERROR_OUTACL_TABLE_MISS :
470 IP4_ERROR_INACL_TABLE_MISS) : IP4_ERROR_NONE;
472 error[1] = (_next[1] == ACL_NEXT_INDEX_DENY) ?
473 (is_output ? IP6_ERROR_OUTACL_TABLE_MISS :
474 IP6_ERROR_INACL_TABLE_MISS) : IP6_ERROR_NONE;
475 b[1]->error = error_node->errors[error[1]];
479 if (t[1]->current_data_flag == CLASSIFY_FLAG_USE_CURR_DATA)
481 (void *) vlib_buffer_get_current (b[1]) +
482 t[1]->current_data_offset;
486 /* advance the match pointer so the matching happens on IP header */
488 h[1] += vnet_buffer (b[1])->l2_classify.pad.l2_len;
491 vnet_classify_hash_packet_inline (t[1], (u8 *) h[1]);
493 vnet_classify_find_entry_inline (t[1], (u8 *) h[1],
497 vnet_buffer (b[1])->l2_classify.opaque_index
498 = e[1]->opaque_index;
499 vlib_buffer_advance (b[1], e[1]->advance);
500 _next[1] = (e[1]->next_index < n_next_nodes) ?
501 e[1]->next_index : _next[1];
506 error[1] = (_next[1] == ACL_NEXT_INDEX_DENY) ?
507 (is_output ? IP4_ERROR_OUTACL_SESSION_DENY :
508 IP4_ERROR_INACL_SESSION_DENY) : IP4_ERROR_NONE;
510 error[1] = (_next[1] == ACL_NEXT_INDEX_DENY) ?
511 (is_output ? IP6_ERROR_OUTACL_SESSION_DENY :
512 IP6_ERROR_INACL_SESSION_DENY) : IP6_ERROR_NONE;
513 b[1]->error = error_node->errors[error[1]];
518 CLASSIFY_ACTION_SET_IP4_FIB_INDEX
520 CLASSIFY_ACTION_SET_IP6_FIB_INDEX)
521 vnet_buffer (b[1])->sw_if_index[VLIB_TX] =
523 else if (e[1]->action ==
524 CLASSIFY_ACTION_SET_METADATA)
525 vnet_buffer (b[1])->ip.adj_index[VLIB_TX] =
/* Optional per-packet tracing, gated on do_trace and the buffer flag. */
534 if (do_trace && b[0]->flags & VLIB_BUFFER_IS_TRACED)
536 ip_in_out_acl_trace_t *_t =
537 vlib_add_trace (vm, node, b[0], sizeof (*_t));
539 vnet_buffer (b[0])->sw_if_index[is_output ? VLIB_TX : VLIB_RX];
540 _t->next_index = _next[0];
541 _t->table_index = t[0] ? t[0] - vcm->tables : ~0;
543 && t[0]) ? vnet_classify_get_offset (t[0], e[0]) : ~0;
546 if (do_trace && b[1]->flags & VLIB_BUFFER_IS_TRACED)
548 ip_in_out_acl_trace_t *_t =
549 vlib_add_trace (vm, node, b[1], sizeof (*_t));
551 vnet_buffer (b[1])->sw_if_index[is_output ? VLIB_TX : VLIB_RX];
552 _t->next_index = _next[1];
553 _t->table_index = t[1] ? t[1] - vcm->tables : ~0;
555 && t[1]) ? vnet_classify_get_offset (t[1], e[1]) : ~0;
558 if ((_next[0] == ACL_NEXT_INDEX_DENY) && is_output)
560 /* on output, for the drop node to work properly, go back to ip header */
561 vlib_buffer_advance (b[0], vnet_buffer (b[0])->l2.l2_len);
564 if ((_next[1] == ACL_NEXT_INDEX_DENY) && is_output)
566 /* on output, for the drop node to work properly, go back to ip header */
567 vlib_buffer_advance (b[1], vnet_buffer (b[1])->l2.l2_len);
/* Single-packet tail: same classify / chain-walk / trace / deny logic as
   above, applied one buffer at a time for the vector remainder. */
584 vnet_classify_table_t *t0 = 0;
585 vnet_classify_entry_t *e0 = 0;
586 u32 next0 = ACL_NEXT_INDEX_DENY;
591 vnet_buffer (b[0])->sw_if_index[is_output ? VLIB_TX : VLIB_RX];
593 am->classify_table_index_by_sw_if_index[is_output][tid][sw_if_index0];
595 t0 = pool_elt_at_index (vcm->tables, table_index0);
597 if (t0->current_data_flag == CLASSIFY_FLAG_USE_CURR_DATA)
599 (void *) vlib_buffer_get_current (b[0]) + t0->current_data_offset;
605 /* Save the rewrite length, since we are using the l2_classify struct */
606 vnet_buffer (b[0])->l2_classify.pad.l2_len =
607 vnet_buffer (b[0])->ip.save_rewrite_length;
608 /* advance the match pointer so the matching happens on IP header */
609 h0 += vnet_buffer (b[0])->l2_classify.pad.l2_len;
612 vnet_buffer (b[0])->l2_classify.hash =
613 vnet_classify_hash_packet (t0, (u8 *) h0);
615 vnet_buffer (b[0])->l2_classify.table_index = table_index0;
616 vnet_buffer (b[0])->l2_classify.opaque_index = ~0;
618 vnet_get_config_data (am->vnet_config_main[is_output][tid],
619 &b[0]->current_config_index, &next0,
620 /* # bytes of config data */ 0);
622 if (PREDICT_TRUE (table_index0 != ~0))
624 hash0 = vnet_buffer (b[0])->l2_classify.hash;
625 t0 = pool_elt_at_index (vcm->tables, table_index0);
627 if (t0->current_data_flag == CLASSIFY_FLAG_USE_CURR_DATA)
629 (void *) vlib_buffer_get_current (b[0]) +
630 t0->current_data_offset;
634 /* advance the match pointer so the matching happens on IP header */
636 h0 += vnet_buffer (b[0])->l2_classify.pad.l2_len;
638 e0 = vnet_classify_find_entry_inline (t0, (u8 *) h0, hash0, now);
641 vnet_buffer (b[0])->l2_classify.opaque_index = e0->opaque_index;
642 vlib_buffer_advance (b[0], e0->advance);
644 next0 = (e0->next_index < n_next_nodes) ?
645 e0->next_index : next0;
650 error0 = (next0 == ACL_NEXT_INDEX_DENY) ?
651 (is_output ? IP4_ERROR_OUTACL_SESSION_DENY :
652 IP4_ERROR_INACL_SESSION_DENY) : IP4_ERROR_NONE;
654 error0 = (next0 == ACL_NEXT_INDEX_DENY) ?
655 (is_output ? IP6_ERROR_OUTACL_SESSION_DENY :
656 IP6_ERROR_INACL_SESSION_DENY) : IP6_ERROR_NONE;
657 b[0]->error = error_node->errors[error0];
661 if (e0->action == CLASSIFY_ACTION_SET_IP4_FIB_INDEX ||
662 e0->action == CLASSIFY_ACTION_SET_IP6_FIB_INDEX)
663 vnet_buffer (b[0])->sw_if_index[VLIB_TX] = e0->metadata;
664 else if (e0->action == CLASSIFY_ACTION_SET_METADATA)
665 vnet_buffer (b[0])->ip.adj_index[VLIB_TX] = e0->metadata;
672 if (PREDICT_TRUE (t0->next_table_index != ~0))
674 pool_elt_at_index (vcm->tables, t0->next_table_index);
677 next0 = (t0->miss_next_index < n_next_nodes) ?
678 t0->miss_next_index : next0;
683 error0 = (next0 == ACL_NEXT_INDEX_DENY) ?
684 (is_output ? IP4_ERROR_OUTACL_TABLE_MISS :
685 IP4_ERROR_INACL_TABLE_MISS) : IP4_ERROR_NONE;
687 error0 = (next0 == ACL_NEXT_INDEX_DENY) ?
688 (is_output ? IP6_ERROR_OUTACL_TABLE_MISS :
689 IP6_ERROR_INACL_TABLE_MISS) : IP6_ERROR_NONE;
690 b[0]->error = error_node->errors[error0];
694 if (t0->current_data_flag == CLASSIFY_FLAG_USE_CURR_DATA)
696 (void *) vlib_buffer_get_current (b[0]) +
697 t0->current_data_offset;
701 /* advance the match pointer so the matching happens on IP header */
703 h0 += vnet_buffer (b[0])->l2_classify.pad.l2_len;
705 hash0 = vnet_classify_hash_packet_inline (t0, (u8 *) h0);
706 e0 = vnet_classify_find_entry_inline
707 (t0, (u8 *) h0, hash0, now);
710 vnet_buffer (b[0])->l2_classify.opaque_index
712 vlib_buffer_advance (b[0], e0->advance);
713 next0 = (e0->next_index < n_next_nodes) ?
714 e0->next_index : next0;
718 error0 = (next0 == ACL_NEXT_INDEX_DENY) ?
719 (is_output ? IP4_ERROR_OUTACL_SESSION_DENY :
720 IP4_ERROR_INACL_SESSION_DENY) : IP4_ERROR_NONE;
722 error0 = (next0 == ACL_NEXT_INDEX_DENY) ?
723 (is_output ? IP6_ERROR_OUTACL_SESSION_DENY :
724 IP6_ERROR_INACL_SESSION_DENY) : IP6_ERROR_NONE;
725 b[0]->error = error_node->errors[error0];
730 CLASSIFY_ACTION_SET_IP4_FIB_INDEX
732 CLASSIFY_ACTION_SET_IP6_FIB_INDEX)
733 vnet_buffer (b[0])->sw_if_index[VLIB_TX] =
735 else if (e0->action == CLASSIFY_ACTION_SET_METADATA)
736 vnet_buffer (b[0])->ip.adj_index[VLIB_TX] =
745 if (do_trace && b[0]->flags & VLIB_BUFFER_IS_TRACED)
747 ip_in_out_acl_trace_t *t =
748 vlib_add_trace (vm, node, b[0], sizeof (*t));
750 vnet_buffer (b[0])->sw_if_index[is_output ? VLIB_TX : VLIB_RX];
751 t->next_index = next0;
752 t->table_index = t0 ? t0 - vcm->tables : ~0;
753 t->offset = (e0 && t0) ? vnet_classify_get_offset (t0, e0) : ~0;
756 if ((next0 == ACL_NEXT_INDEX_DENY) && is_output)
758 /* on output, for the drop node to work properly, go back to ip header */
759 vlib_buffer_advance (b[0], vnet_buffer (b[0])->l2.l2_len);
/* Publish the aggregated miss / hit / chain-hit counts against this
   node's IN- or OUT-ACL counters. */
770 vlib_node_increment_counter (vm, node->node_index,
771 is_output ? IP_OUTACL_ERROR_MISS :
772 IP_INACL_ERROR_MISS, misses);
773 vlib_node_increment_counter (vm, node->node_index,
774 is_output ? IP_OUTACL_ERROR_HIT :
775 IP_INACL_ERROR_HIT, hits);
776 vlib_node_increment_counter (vm, node->node_index,
777 is_output ? IP_OUTACL_ERROR_CHAIN_HIT :
778 IP_INACL_ERROR_CHAIN_HIT, chain_hits);
/* IPv4 input-ACL node: dispatches the whole frame to the shared inline,
   with a trace-enabled variant when the node has tracing on.
   NOTE(review): braces, the `u32 *from;` declaration and the is_ip4
   argument line are missing from this extract. */
781 VLIB_NODE_FN (ip4_inacl_node) (vlib_main_t * vm, vlib_node_runtime_t * node,
782 vlib_frame_t * frame)
786 vlib_buffer_t *bufs[VLIB_FRAME_SIZE];
787 u16 nexts[VLIB_FRAME_SIZE];
789 from = vlib_frame_vector_args (frame);
791 vlib_get_buffers (vm, from, bufs, frame->n_vectors);
793 if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)))
794 ip_in_out_acl_inline (vm, node, bufs, nexts, frame->n_vectors,
796 0 /* is_output */ , 1 /* is_trace */ );
798 ip_in_out_acl_inline (vm, node, bufs, nexts, frame->n_vectors,
800 0 /* is_output */ , 0 /* is_trace */ );
802 vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);
804 return frame->n_vectors;
/* IPv4 output-ACL node: same dispatch shape as ip4_inacl_node but with
   is_output = 1.  NOTE(review): same missing lines as its sibling. */
807 VLIB_NODE_FN (ip4_outacl_node) (vlib_main_t * vm, vlib_node_runtime_t * node,
808 vlib_frame_t * frame)
811 vlib_buffer_t *bufs[VLIB_FRAME_SIZE];
812 u16 nexts[VLIB_FRAME_SIZE];
814 from = vlib_frame_vector_args (frame);
816 vlib_get_buffers (vm, from, bufs, frame->n_vectors);
818 if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)))
819 ip_in_out_acl_inline (vm, node, bufs, nexts, frame->n_vectors,
821 1 /* is_output */ , 1 /* is_trace */ );
823 ip_in_out_acl_inline (vm, node, bufs, nexts, frame->n_vectors,
825 1 /* is_output */ , 0 /* is_trace */ );
827 vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);
829 return frame->n_vectors;
/* Graph-node registration for ip4 input ACL; deny arc goes to ip4-drop.
   NOTE(review): the `.name` line and the `.next_nodes = { ... }` wrapper
   plus the closing `};` are missing from this extract (the outacl
   registration below still shows its `.name`). */
833 VLIB_REGISTER_NODE (ip4_inacl_node) = {
835 .vector_size = sizeof (u32),
836 .format_trace = format_ip_inacl_trace,
837 .n_errors = ARRAY_LEN(ip_inacl_error_strings),
838 .error_strings = ip_inacl_error_strings,
840 .n_next_nodes = ACL_NEXT_INDEX_N_NEXT,
842 [ACL_NEXT_INDEX_DENY] = "ip4-drop",
/* Graph-node registration for ip4 output ACL; deny arc goes to ip4-drop.
   NOTE(review): the `.next_nodes = { ... }` wrapper and closing `};`
   appear to be missing from this extract. */
846 VLIB_REGISTER_NODE (ip4_outacl_node) = {
847 .name = "ip4-outacl",
848 .vector_size = sizeof (u32),
849 .format_trace = format_ip_outacl_trace,
850 .n_errors = ARRAY_LEN(ip_outacl_error_strings),
851 .error_strings = ip_outacl_error_strings,
853 .n_next_nodes = ACL_NEXT_INDEX_N_NEXT,
855 [ACL_NEXT_INDEX_DENY] = "ip4-drop",
/* IPv6 input-ACL node: same dispatch shape as the ip4 variant.
   NOTE(review): braces, the `u32 *from;` declaration and the is_ip4
   argument line are missing from this extract. */
860 VLIB_NODE_FN (ip6_inacl_node) (vlib_main_t * vm, vlib_node_runtime_t * node,
861 vlib_frame_t * frame)
864 vlib_buffer_t *bufs[VLIB_FRAME_SIZE];
865 u16 nexts[VLIB_FRAME_SIZE];
867 from = vlib_frame_vector_args (frame);
869 vlib_get_buffers (vm, from, bufs, frame->n_vectors);
871 if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)))
872 ip_in_out_acl_inline (vm, node, bufs, nexts, frame->n_vectors,
874 0 /* is_output */ , 1 /* is_trace */ );
876 ip_in_out_acl_inline (vm, node, bufs, nexts, frame->n_vectors,
878 0 /* is_output */ , 0 /* is_trace */ );
880 vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);
882 return frame->n_vectors;
/* IPv6 output-ACL node: same dispatch shape, is_output = 1.
   NOTE(review): same missing lines as its siblings. */
885 VLIB_NODE_FN (ip6_outacl_node) (vlib_main_t * vm, vlib_node_runtime_t * node,
886 vlib_frame_t * frame)
889 vlib_buffer_t *bufs[VLIB_FRAME_SIZE];
890 u16 nexts[VLIB_FRAME_SIZE];
892 from = vlib_frame_vector_args (frame);
894 vlib_get_buffers (vm, from, bufs, frame->n_vectors);
896 if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)))
897 ip_in_out_acl_inline (vm, node, bufs, nexts, frame->n_vectors,
899 1 /* is_output */ , 1 /* is_trace */ );
901 ip_in_out_acl_inline (vm, node, bufs, nexts, frame->n_vectors,
903 1 /* is_output */ , 0 /* is_trace */ );
905 vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);
907 return frame->n_vectors;
/* Graph-node registration for ip6 input ACL; deny arc goes to ip6-drop.
   NOTE(review): the `.name` line, `.next_nodes = { ... }` wrapper and
   closing `};` are missing from this extract. */
911 VLIB_REGISTER_NODE (ip6_inacl_node) = {
913 .vector_size = sizeof (u32),
914 .format_trace = format_ip_inacl_trace,
915 .n_errors = ARRAY_LEN(ip_inacl_error_strings),
916 .error_strings = ip_inacl_error_strings,
918 .n_next_nodes = ACL_NEXT_INDEX_N_NEXT,
920 [ACL_NEXT_INDEX_DENY] = "ip6-drop",
/* Graph-node registration for ip6 output ACL; deny arc goes to ip6-drop.
   NOTE(review): the `.next_nodes = { ... }` wrapper and closing `};`
   appear to be missing from this extract. */
924 VLIB_REGISTER_NODE (ip6_outacl_node) = {
925 .name = "ip6-outacl",
926 .vector_size = sizeof (u32),
927 .format_trace = format_ip_outacl_trace,
928 .n_errors = ARRAY_LEN(ip_outacl_error_strings),
929 .error_strings = ip_outacl_error_strings,
931 .n_next_nodes = ACL_NEXT_INDEX_N_NEXT,
933 [ACL_NEXT_INDEX_DENY] = "ip6-drop",
/* One-time init hook, compiled only in the default (non-march-variant)
   translation unit and registered with VLIB's init framework.
   NOTE(review): the function body and closing brace are missing from this
   extract. */
938 #ifndef CLIB_MARCH_VARIANT
939 static clib_error_t *
940 ip_in_out_acl_init (vlib_main_t * vm)
945 VLIB_INIT_FUNCTION (ip_in_out_acl_init);
946 #endif /* CLIB_MARCH_VARIANT */
950 * fd.io coding-style-patch-verification: ON
953 * eval: (c-set-style "gnu")