2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
15 #include <vnet/ip/ip.h>
16 #include <vnet/classify/vnet_classify.h>
17 #include <vnet/classify/input_acl.h>
26 /* packet trace format function */
/* Formats an ip_inacl_trace_t record for "show trace": prints the RX
 * sw_if_index, the chosen next-node index, the classify table index and
 * the byte offset of the matched session entry.
 * NOTE(review): the embedded original line numbers jump (34 -> 38), so the
 * closing lines of this function (return / brace) are missing from this
 * extracted view. */
27 static u8 * format_ip_inacl_trace (u8 * s, va_list * args)
/* The first two va_args are the standard vlib trace-format arguments;
 * they are consumed but unused here. */
29 CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
30 CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
31 ip_inacl_trace_t * t = va_arg (*args, ip_inacl_trace_t *);
33 s = format (s, "INACL: sw_if_index %d, next_index %d, table %d, offset %d",
34 t->sw_if_index, t->next_index, t->table_index, t->offset);
/* Forward declarations of the node registrations so code above the
 * VLIB_REGISTER_NODE blocks below can reference them. */
38 vlib_node_registration_t ip4_inacl_node;
39 vlib_node_registration_t ip6_inacl_node;
/* Error counters exported by the input-ACL nodes: table misses, session
 * hits, and hits found only after walking a chain of linked tables. */
41 #define foreach_ip_inacl_error \
42 _(MISS, "input ACL misses") \
43 _(HIT, "input ACL hits") \
44 _(CHAIN_HIT, "input ACL hits after chain walk")
/* Expands to IP_INACL_ERROR_MISS, IP_INACL_ERROR_HIT, ... enum members.
 * NOTE(review): the enum typedef wrapper lines (45-46, 49-52) are missing
 * from this extracted view. */
47 #define _(sym,str) IP_INACL_ERROR_##sym,
48 foreach_ip_inacl_error
/* Human-readable counter names, indexed by the error enum above. */
53 static char * ip_inacl_error_strings[] = {
54 #define _(sym,string) string,
55 foreach_ip_inacl_error
/* ip_inacl_inline: shared worker for the ip4/ip6 input-ACL graph nodes.
 *
 * Two-pass design over the frame:
 *   Pass 1 computes classify hashes per packet and prefetches the hash
 *   buckets so pass 2's table lookups land in warm cache lines.
 *   Pass 2 performs the classify lookup per packet, walks chained tables
 *   on a miss, sets b0->error and next0, and enqueues the buffer.
 *
 * NOTE(review): the embedded original line numbers jump throughout
 * (e.g. 62->64, 113->117, 154->160, 263->267, 364->367), so braces,
 * several declarations (bi0/bi1, h0/h1, hash0, error0, phash1, ...),
 * else-arms and counter arguments are missing from this extracted view.
 * Comments below describe only what is visible. */
60 ip_inacl_inline (vlib_main_t * vm,
61 vlib_node_runtime_t * node,
62 vlib_frame_t * frame, int is_ip4)
64 u32 n_left_from, * from, * to_next;
65 acl_next_index_t next_index;
66 input_acl_main_t * am = &input_acl_main;
67 vnet_classify_main_t * vcm = am->vnet_classify_main;
/* Timestamp handed to vnet_classify_find_entry (used for session aging). */
68 f64 now = vlib_time_now (vm);
72 input_acl_table_id_t tid;
73 vlib_node_runtime_t * error_node;
76 n_next_nodes = node->n_next_nodes;
/* Select the per-address-family table id and the input node whose error
 * counters (IP4_ERROR_* / IP6_ERROR_*) are charged on deny/miss.
 * (The is_ip4 if/else lines are missing from this extraction.) */
80 tid = INPUT_ACL_TABLE_IP4;
81 error_node = vlib_node_get_runtime (vm, ip4_input_node.index);
85 tid = INPUT_ACL_TABLE_IP6;
86 error_node = vlib_node_get_runtime (vm, ip6_input_node.index);
89 from = vlib_frame_vector_args (frame);
90 n_left_from = frame->n_vectors;
92 /* First pass: compute hashes */
/* Dual-packet loop: hash b0/b1 while prefetching the buffers needed by
 * the following iteration. */
94 while (n_left_from > 2)
96 vlib_buffer_t * b0, * b1;
99 u32 sw_if_index0, sw_if_index1;
100 u32 table_index0, table_index1;
101 vnet_classify_table_t * t0, * t1;
103 /* prefetch next iteration */
105 vlib_buffer_t * p1, * p2;
107 p1 = vlib_get_buffer (vm, from[1]);
108 p2 = vlib_get_buffer (vm, from[2]);
110 vlib_prefetch_buffer_header (p1, STORE);
111 CLIB_PREFETCH (p1->data, CLIB_CACHE_LINE_BYTES, STORE);
112 vlib_prefetch_buffer_header (p2, STORE);
113 CLIB_PREFETCH (p2->data, CLIB_CACHE_LINE_BYTES, STORE);
117 b0 = vlib_get_buffer (vm, bi0);
120 b1 = vlib_get_buffer (vm, bi1);
/* Look up the classify table configured on the RX interface for this
 * address family. */
122 sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];
123 table_index0 = am->classify_table_index_by_sw_if_index[tid][sw_if_index0];
125 sw_if_index1 = vnet_buffer (b1)->sw_if_index[VLIB_RX];
126 table_index1 = am->classify_table_index_by_sw_if_index[tid][sw_if_index1];
128 t0 = pool_elt_at_index (vcm->tables, table_index0);
130 t1 = pool_elt_at_index (vcm->tables, table_index1);
/* When the table classifies on current data, hash from the (possibly
 * offset) current pointer rather than the packet start; the else-arm
 * assigning h0/h1 otherwise is missing from this extraction. */
132 if (t0->current_data_flag == CLASSIFY_FLAG_USE_CURR_DATA)
133 h0 = (void *)vlib_buffer_get_current (b0) + t0->current_data_offset;
/* Stash the hash in buffer metadata and prefetch its bucket for pass 2. */
137 vnet_buffer(b0)->l2_classify.hash =
138 vnet_classify_hash_packet (t0, (u8 *) h0);
140 vnet_classify_prefetch_bucket (t0, vnet_buffer(b0)->l2_classify.hash);
142 if (t1->current_data_flag == CLASSIFY_FLAG_USE_CURR_DATA)
143 h1 = (void *)vlib_buffer_get_current (b1) + t1->current_data_offset;
147 vnet_buffer(b1)->l2_classify.hash =
148 vnet_classify_hash_packet (t1, (u8 *) h1);
150 vnet_classify_prefetch_bucket (t1, vnet_buffer(b1)->l2_classify.hash);
152 vnet_buffer(b0)->l2_classify.table_index = table_index0;
154 vnet_buffer(b1)->l2_classify.table_index = table_index1;
/* Single-packet cleanup loop for the remainder of the frame. */
160 while (n_left_from > 0)
167 vnet_classify_table_t * t0;
170 b0 = vlib_get_buffer (vm, bi0);
172 sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];
173 table_index0 = am->classify_table_index_by_sw_if_index[tid][sw_if_index0];
175 t0 = pool_elt_at_index (vcm->tables, table_index0);
177 if (t0->current_data_flag == CLASSIFY_FLAG_USE_CURR_DATA)
178 h0 = (void *)vlib_buffer_get_current (b0) + t0->current_data_offset;
182 vnet_buffer(b0)->l2_classify.hash =
183 vnet_classify_hash_packet (t0, (u8 *) h0);
185 vnet_buffer(b0)->l2_classify.table_index = table_index0;
186 vnet_classify_prefetch_bucket (t0, vnet_buffer(b0)->l2_classify.hash);
/* Second pass: classify each packet using the hashes computed above,
 * re-walking the frame from the start. */
192 next_index = node->cached_next_index;
193 from = vlib_frame_vector_args (frame);
194 n_left_from = frame->n_vectors;
196 while (n_left_from > 0)
200 vlib_get_next_frame (vm, node, next_index,
201 to_next, n_left_to_next);
203 /* Not enough load/store slots to dual loop... */
204 while (n_left_from > 0 && n_left_to_next > 0)
/* Default disposition: deny unless a session hit or a table's
 * miss-next overrides it. */
208 u32 next0 = ACL_NEXT_INDEX_DENY;
210 vnet_classify_table_t * t0;
211 vnet_classify_entry_t * e0;
216 /* Stride 3 seems to work best */
217 if (PREDICT_TRUE (n_left_from > 3))
219 vlib_buffer_t * p1 = vlib_get_buffer(vm, from[3]);
220 vnet_classify_table_t * tp1;
224 table_index1 = vnet_buffer(p1)->l2_classify.table_index;
226 if (PREDICT_TRUE (table_index1 != ~0))
228 tp1 = pool_elt_at_index (vcm->tables, table_index1);
229 phash1 = vnet_buffer(p1)->l2_classify.hash;
/* Prefetch the classify entry three packets ahead of the cursor. */
230 vnet_classify_prefetch_entry (tp1, phash1);
234 /* speculatively enqueue b0 to the current next frame */
242 b0 = vlib_get_buffer (vm, bi0);
243 table_index0 = vnet_buffer(b0)->l2_classify.table_index;
/* Advance this buffer's feature-config cursor along the input-acl arc. */
246 vnet_get_config_data (am->vnet_config_main[tid],
247 &b0->current_config_index,
249 /* # bytes of config data */ 0);
251 vnet_buffer(b0)->l2_classify.opaque_index = ~0;
/* table_index0 == ~0 means no ACL table on this interface: skip lookup. */
253 if (PREDICT_TRUE(table_index0 != ~0))
255 hash0 = vnet_buffer(b0)->l2_classify.hash;
256 t0 = pool_elt_at_index (vcm->tables, table_index0);
258 if (t0->current_data_flag == CLASSIFY_FLAG_USE_CURR_DATA)
259 h0 = (void *)vlib_buffer_get_current (b0) + t0->current_data_offset;
/* Session lookup in the first table (closing arguments and the
 * "if (e0)" hit test are missing from this extraction). */
263 e0 = vnet_classify_find_entry (t0, (u8 *) h0, hash0,
/* Hit: record the session's opaque index, apply its rewrite advance,
 * and take its next node if the index is in range. */
267 vnet_buffer(b0)->l2_classify.opaque_index
269 vlib_buffer_advance (b0, e0->advance);
271 next0 = (e0->next_index < n_next_nodes)?
272 e0->next_index:next0;
/* Per-AF error assignment on the hit path (the is_ip4 branch lines are
 * missing from this extraction). */
277 error0 = (next0 == ACL_NEXT_INDEX_DENY)?
278 IP4_ERROR_INACL_SESSION_DENY:IP4_ERROR_NONE;
280 error0 = (next0 == ACL_NEXT_INDEX_DENY)?
281 IP6_ERROR_INACL_SESSION_DENY:IP6_ERROR_NONE;
282 b0->error = error_node->errors[error0];
/* A matching session may steer the packet into a specific FIB carried
 * in the entry's metadata. */
284 if (e0->action == CLASSIFY_ACTION_SET_IP4_FIB_INDEX ||
285 e0->action == CLASSIFY_ACTION_SET_IP6_FIB_INDEX)
286 vnet_buffer (b0)->sw_if_index[VLIB_TX] = e0->metadata;
/* Miss in the first table: walk the chain of linked tables. */
292 if (PREDICT_TRUE(t0->next_table_index != ~0))
293 t0 = pool_elt_at_index (vcm->tables,
294 t0->next_table_index);
/* End of chain reached: apply the table's miss-next disposition. */
297 next0 = (t0->miss_next_index < n_next_nodes)?
298 t0->miss_next_index:next0;
303 error0 = (next0 == ACL_NEXT_INDEX_DENY)?
304 IP4_ERROR_INACL_TABLE_MISS:IP4_ERROR_NONE;
306 error0 = (next0 == ACL_NEXT_INDEX_DENY)?
307 IP6_ERROR_INACL_TABLE_MISS:IP6_ERROR_NONE;
308 b0->error = error_node->errors[error0];
/* Re-hash against the chained table and retry the lookup. */
312 if (t0->current_data_flag == CLASSIFY_FLAG_USE_CURR_DATA)
313 h0 = (void *)vlib_buffer_get_current (b0) + t0->current_data_offset;
317 hash0 = vnet_classify_hash_packet (t0, (u8 *) h0);
318 e0 = vnet_classify_find_entry
319 (t0, (u8 *) h0, hash0, now);
/* Chain-walk hit: same handling as a first-table hit. */
322 vnet_buffer(b0)->l2_classify.opaque_index
324 vlib_buffer_advance (b0, e0->advance);
325 next0 = (e0->next_index < n_next_nodes)?
326 e0->next_index:next0;
331 error0 = (next0 == ACL_NEXT_INDEX_DENY)?
332 IP4_ERROR_INACL_SESSION_DENY:IP4_ERROR_NONE;
334 error0 = (next0 == ACL_NEXT_INDEX_DENY)?
335 IP6_ERROR_INACL_SESSION_DENY:IP6_ERROR_NONE;
336 b0->error = error_node->errors[error0];
338 if (e0->action == CLASSIFY_ACTION_SET_IP4_FIB_INDEX ||
339 e0->action == CLASSIFY_ACTION_SET_IP6_FIB_INDEX)
340 vnet_buffer (b0)->sw_if_index[VLIB_TX] = e0->metadata;
/* Record a trace entry when tracing is enabled for this buffer. */
347 if (PREDICT_FALSE((node->flags & VLIB_NODE_FLAG_TRACE)
348 && (b0->flags & VLIB_BUFFER_IS_TRACED)))
350 ip_inacl_trace_t *t =
351 vlib_add_trace (vm, node, b0, sizeof (*t));
352 t->sw_if_index = vnet_buffer(b0)->sw_if_index[VLIB_RX];
353 t->next_index = next0;
354 t->table_index = t0 ? t0 - vcm->tables : ~0;
355 t->offset = (e0 && t0) ? vnet_classify_get_offset (t0, e0): ~0;
358 /* verify speculative enqueue, maybe switch current next frame */
359 vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
360 to_next, n_left_to_next,
364 vlib_put_next_frame (vm, node, next_index, n_left_to_next);
/* Export miss / hit / chain-hit counters (the counter-id and value
 * argument lines are missing from this extraction). */
367 vlib_node_increment_counter (vm, node->node_index,
370 vlib_node_increment_counter (vm, node->node_index,
373 vlib_node_increment_counter (vm, node->node_index,
374 IP_INACL_ERROR_CHAIN_HIT,
376 return frame->n_vectors;
/* IPv4 node dispatch: thin wrapper calling the shared worker with
 * is_ip4 = 1. NOTE(review): the return-type line (original 379) is
 * missing from this extracted view. */
380 ip4_inacl (vlib_main_t * vm,
381 vlib_node_runtime_t * node,
382 vlib_frame_t * frame)
384 return ip_inacl_inline (vm, node, frame, 1 /* is_ip4 */);
/* Graph-node registration for the IPv4 input-ACL feature. */
388 VLIB_REGISTER_NODE (ip4_inacl_node) = {
389 .function = ip4_inacl,
391 .vector_size = sizeof (u32),
392 .format_trace = format_ip_inacl_trace,
393 .n_errors = ARRAY_LEN(ip_inacl_error_strings),
394 .error_strings = ip_inacl_error_strings,
396 .n_next_nodes = ACL_NEXT_INDEX_N_NEXT,
/* Denied packets are sent to error-drop; the .next_nodes wrapper line
 * (original 397) is missing from this extracted view. */
398 [ACL_NEXT_INDEX_DENY] = "error-drop",
402 VLIB_NODE_FUNCTION_MULTIARCH (ip4_inacl_node, ip4_inacl)
/* IPv6 node dispatch: thin wrapper calling the shared worker with
 * is_ip4 = 0. NOTE(review): the return-type line (original 404) is
 * missing from this extracted view. */
405 ip6_inacl (vlib_main_t * vm,
406 vlib_node_runtime_t * node,
407 vlib_frame_t * frame)
409 return ip_inacl_inline (vm, node, frame, 0 /* is_ip4 */);
/* Graph-node registration for the IPv6 input-ACL feature; mirrors the
 * IPv4 registration above. */
413 VLIB_REGISTER_NODE (ip6_inacl_node) = {
414 .function = ip6_inacl,
416 .vector_size = sizeof (u32),
417 .format_trace = format_ip_inacl_trace,
418 .n_errors = ARRAY_LEN(ip_inacl_error_strings),
419 .error_strings = ip_inacl_error_strings,
421 .n_next_nodes = ACL_NEXT_INDEX_N_NEXT,
/* Denied packets are sent to error-drop; the .next_nodes wrapper line
 * (original 422) is missing from this extracted view. */
423 [ACL_NEXT_INDEX_DENY] = "error-drop",
427 VLIB_NODE_FUNCTION_MULTIARCH (ip6_inacl_node, ip6_inacl)
/* Module init function, invoked via VLIB_INIT_FUNCTION at startup.
 * NOTE(review): the body (original lines 431-434) is missing from this
 * extracted view. */
429 static clib_error_t *
430 ip_inacl_init (vlib_main_t * vm)
435 VLIB_INIT_FUNCTION (ip_inacl_init);