2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
15 #include <vnet/classify/vnet_classify.h>
16 #include <vnet/classify/input_acl.h>
17 #include <vnet/ip/ip.h>
18 #include <vnet/api_errno.h> /* for API error numbers */
19 #include <vnet/l2/l2_classify.h> /* for L2_INPUT_CLASSIFY_NEXT_xxx */
20 #include <vnet/fib/fib_table.h>
/* Global classifier state: table pool plus registered unformat hooks. */
22 vnet_classify_main_t vnet_classify_main;
24 #if VALIDATION_SCAFFOLDING
25 /* Validation scaffolding */
/* mv: validate the table's private mheap while it is the active heap.
 * NOTE(review): interior lines appear missing in this copy — TODO confirm
 * against upstream before relying on exact behavior. */
26 void mv (vnet_classify_table_t * t)
30 oldheap = clib_mem_set_heap (t->mheap);
32 clib_mem_set_heap (oldheap);
/* rogue: walk every bucket/page/entry, count busy entries, and warn if
 * the count disagrees with t->active_elements (consistency check). */
35 void rogue (vnet_classify_table_t * t)
38 vnet_classify_entry_t * v, * save_v;
39 u32 active_elements = 0;
40 vnet_classify_bucket_t * b;
42 for (i = 0; i < t->nbuckets; i++)
47 save_v = vnet_classify_get_entry (t, b->offset);
48 for (j = 0; j < (1<<b->log2_pages); j++)
50 for (k = 0; k < t->entries_per_page; k++)
52 v = vnet_classify_entry_at_index
53 (t, save_v, j*t->entries_per_page + k);
55 if (vnet_classify_entry_is_busy (v))
61 if (active_elements != t->active_elements)
62 clib_warning ("found %u expected %u elts", active_elements,
/* No-op stubs when validation scaffolding is compiled out. */
66 void mv (vnet_classify_table_t * t) { }
67 void rogue (vnet_classify_table_t * t) { }
/* Register an unformat function tried first when parsing an L2 next index. */
70 void vnet_classify_register_unformat_l2_next_index_fn (unformat_function_t * fn)
72 vnet_classify_main_t * cm = &vnet_classify_main;
74 vec_add1 (cm->unformat_l2_next_index_fns, fn);
/* Register an unformat function tried first when parsing an IP next index. */
77 void vnet_classify_register_unformat_ip_next_index_fn (unformat_function_t * fn)
79 vnet_classify_main_t * cm = &vnet_classify_main;
81 vec_add1 (cm->unformat_ip_next_index_fns, fn);
/* Register an unformat function tried first when parsing an ACL next index. */
85 vnet_classify_register_unformat_acl_next_index_fn (unformat_function_t * fn)
87 vnet_classify_main_t * cm = &vnet_classify_main;
89 vec_add1 (cm->unformat_acl_next_index_fns, fn);
/* Register an unformat function tried first when parsing a policer next index. */
93 vnet_classify_register_unformat_policer_next_index_fn (unformat_function_t * fn)
95 vnet_classify_main_t * cm = &vnet_classify_main;
97 vec_add1 (cm->unformat_policer_next_index_fns, fn);
/* Register an unformat function tried first when parsing an opaque index. */
100 void vnet_classify_register_unformat_opaque_index_fn (unformat_function_t * fn)
102 vnet_classify_main_t * cm = &vnet_classify_main;
104 vec_add1 (cm->unformat_opaque_index_fns, fn);
/*
 * Allocate and initialize a new classify table from cm->tables.
 *   mask           - match mask, match_n_vectors * sizeof(u32x4) bytes
 *   nbuckets       - rounded up to the next power of two below
 *   memory_size    - size of the table's private mheap
 * Entries live on the per-table mheap; the writer lock is allocated on
 * that heap, cache-line aligned.  NOTE(review): interior lines (locals,
 * return) appear missing in this copy.
 */
107 vnet_classify_table_t *
108 vnet_classify_new_table (vnet_classify_main_t *cm,
109 u8 * mask, u32 nbuckets, u32 memory_size,
113 vnet_classify_table_t * t;
/* Round bucket count up to a power of two so hash & (nbuckets-1) works. */
116 nbuckets = 1 << (max_log2 (nbuckets));
118 pool_get_aligned (cm->tables, t, CLIB_CACHE_LINE_BYTES);
119 memset(t, 0, sizeof (*t));
121 vec_validate_aligned (t->mask, match_n_vectors - 1, sizeof(u32x4));
122 clib_memcpy (t->mask, mask, match_n_vectors * sizeof (u32x4));
124 t->next_table_index = ~0;
125 t->nbuckets = nbuckets;
126 t->log2_nbuckets = max_log2 (nbuckets);
127 t->match_n_vectors = match_n_vectors;
128 t->skip_n_vectors = skip_n_vectors;
129 t->entries_per_page = 2;
/* Private heap for entries so deleting the table frees everything at once. */
131 t->mheap = mheap_alloc (0 /* use VM */, memory_size);
133 vec_validate_aligned (t->buckets, nbuckets - 1, CLIB_CACHE_LINE_BYTES);
134 oldheap = clib_mem_set_heap (t->mheap);
136 t->writer_lock = clib_mem_alloc_aligned (CLIB_CACHE_LINE_BYTES,
137 CLIB_CACHE_LINE_BYTES);
138 t->writer_lock[0] = 0;
140 clib_mem_set_heap (oldheap);
/*
 * Delete a classify table (and, recursively, its chained next table).
 * Freeing the table's private mheap releases all entries in one shot.
 */
144 void vnet_classify_delete_table_index (vnet_classify_main_t *cm,
147 vnet_classify_table_t * t;
149 /* Tolerate multiple frees, up to a point */
150 if (pool_is_free_index (cm->tables, table_index))
153 t = pool_elt_at_index (cm->tables, table_index);
/* Chained tables are deleted depth-first. */
154 if (t->next_table_index != ~0)
155 vnet_classify_delete_table_index (cm, t->next_table_index);
158 vec_free (t->buckets);
159 mheap_free (t->mheap);
161 pool_put (cm->tables, t);
/*
 * Allocate a page set of 2^log2_pages pages of entries, preferring the
 * per-size freelist; otherwise carve new vectors out of the table's mheap.
 * Caller must hold the writer lock.  New/recycled entries are filled with
 * 0xff (i.e. marked free).  NOTE(review): macro bodies are truncated in
 * this copy.
 */
164 static vnet_classify_entry_t *
165 vnet_classify_entry_alloc (vnet_classify_table_t * t, u32 log2_pages)
167 vnet_classify_entry_t * rv = 0;
169 vnet_classify_entry_##size##_t * rv##size = 0;
170 foreach_size_in_u32x4;
175 ASSERT (t->writer_lock[0]);
/* Freelist miss: allocate a correctly-sized vector on the table heap. */
176 if (log2_pages >= vec_len (t->freelists) || t->freelists [log2_pages] == 0)
178 oldheap = clib_mem_set_heap (t->mheap);
180 vec_validate (t->freelists, log2_pages);
182 switch(t->match_n_vectors)
184 /* Euchre the vector allocator into allocating the right sizes */
187 vec_validate_aligned \
188 (rv##size, ((1<<log2_pages)*t->entries_per_page) - 1, \
189 CLIB_CACHE_LINE_BYTES); \
190 rv = (vnet_classify_entry_t *) rv##size; \
192 foreach_size_in_u32x4;
199 clib_mem_set_heap (oldheap);
/* Freelist hit: pop the head. */
202 rv = t->freelists[log2_pages];
203 t->freelists[log2_pages] = rv->next_free;
207 ASSERT (vec_len(rv) == (1<<log2_pages)*t->entries_per_page);
/* Poison with 0xff: VNET_CLASSIFY_ENTRY_FREE is encoded in the flags. */
209 switch (t->match_n_vectors)
214 memset (rv, 0xff, sizeof (*rv##size) * vec_len(rv)); \
216 foreach_size_in_u32x4;
/*
 * Return a page set of entries to the freelist sized by its page count.
 * Caller must hold the writer lock; v must be a vector previously handed
 * out by vnet_classify_entry_alloc.
 */
227 vnet_classify_entry_free (vnet_classify_table_t * t,
228 vnet_classify_entry_t * v)
232 ASSERT (t->writer_lock[0]);
/* Recover log2_pages from the vector length. */
234 free_list_index = min_log2(vec_len(v)/t->entries_per_page);
236 ASSERT(vec_len (t->freelists) > free_list_index);
238 v->next_free = t->freelists[free_list_index];
239 t->freelists[free_list_index] = v;
/*
 * Copy bucket b's entries into this CPU's working copy and atomically
 * point the bucket at the copy, so readers keep a consistent view while
 * the writer mutates the copy.  The original bucket is saved in
 * t->saved_bucket for later restore/free.
 */
242 static inline void make_working_copy
243 (vnet_classify_table_t * t, vnet_classify_bucket_t * b)
245 vnet_classify_entry_t * v;
246 vnet_classify_bucket_t working_bucket __attribute__((aligned (8)));
248 vnet_classify_entry_t * working_copy;
250 vnet_classify_entry_##size##_t * working_copy##size = 0;
251 foreach_size_in_u32x4;
253 u32 cpu_number = os_get_cpu_number();
/* First use on this CPU: grow the per-cpu working-copy vector. */
255 if (cpu_number >= vec_len (t->working_copies))
257 oldheap = clib_mem_set_heap (t->mheap);
258 vec_validate (t->working_copies, cpu_number);
259 clib_mem_set_heap (oldheap);
263 * working_copies are per-cpu so that near-simultaneous
264 * updates from multiple threads will not result in sporadic, spurious
267 working_copy = t->working_copies[cpu_number];
269 t->saved_bucket.as_u64 = b->as_u64;
270 oldheap = clib_mem_set_heap (t->mheap);
/* Grow the working copy if this bucket has more entries than before. */
272 if ((1<<b->log2_pages)*t->entries_per_page > vec_len (working_copy))
274 switch(t->match_n_vectors)
276 /* Euchre the vector allocator into allocating the right sizes */
279 working_copy##size = (void *) working_copy; \
280 vec_validate_aligned \
281 (working_copy##size, \
282 ((1<<b->log2_pages)*t->entries_per_page) - 1, \
283 CLIB_CACHE_LINE_BYTES); \
284 working_copy = (void *) working_copy##size; \
286 foreach_size_in_u32x4;
292 t->working_copies[cpu_number] = working_copy;
295 _vec_len(working_copy) = (1<<b->log2_pages)*t->entries_per_page;
296 clib_mem_set_heap (oldheap);
298 v = vnet_classify_get_entry (t, b->offset);
/* Bulk-copy the live entries into the working copy, size-specialized. */
300 switch(t->match_n_vectors)
304 clib_memcpy (working_copy, v, \
305 sizeof (vnet_classify_entry_##size##_t) \
306 * (1<<b->log2_pages) \
307 * (t->entries_per_page)); \
309 foreach_size_in_u32x4 ;
/* Publish: barrier ensures the copy is complete before readers see it. */
316 working_bucket.as_u64 = b->as_u64;
317 working_bucket.offset = vnet_classify_get_offset (t, working_copy);
318 CLIB_MEMORY_BARRIER();
319 b->as_u64 = working_bucket.as_u64;
320 t->working_copies[cpu_number] = working_copy;
/*
 * Double a bucket: allocate 2^new_log2_pages pages and rehash every busy
 * entry from old_values into the new page set.  On a placement collision
 * (no free slot on the target page) the new allocation is freed and the
 * caller is expected to retry with more pages.  NOTE(review): the success
 * return path is among the lines missing from this copy.
 */
323 static vnet_classify_entry_t *
324 split_and_rehash (vnet_classify_table_t * t,
325 vnet_classify_entry_t * old_values,
328 vnet_classify_entry_t * new_values, * v, * new_v;
331 new_values = vnet_classify_entry_alloc (t, new_log2_pages);
333 for (i = 0; i < (vec_len (old_values)/t->entries_per_page); i++)
337 for (j = 0; j < t->entries_per_page; j++)
339 v = vnet_classify_entry_at_index
340 (t, old_values, i * t->entries_per_page + j);
342 if (vnet_classify_entry_is_busy (v))
344 /* Hack so we can use the packet hash routine */
346 key_minus_skip = (u8 *) v->key;
347 key_minus_skip -= t->skip_n_vectors * sizeof (u32x4);
349 new_hash = vnet_classify_hash_packet (t, key_minus_skip);
/* Page selection uses the hash bits above the bucket-index bits. */
350 new_hash >>= t->log2_nbuckets;
351 new_hash &= (1<<new_log2_pages) - 1;
353 for (k = 0; k < t->entries_per_page; k++)
355 new_v = vnet_classify_entry_at_index (t, new_values,
358 if (vnet_classify_entry_is_free (new_v))
360 clib_memcpy (new_v, v, sizeof (vnet_classify_entry_t)
361 + (t->match_n_vectors * sizeof (u32x4)));
362 new_v->flags &= ~(VNET_CLASSIFY_ENTRY_FREE);
366 /* Crap. Tell caller to try again */
367 vnet_classify_entry_free (t, new_values);
/*
 * Add, replace, or delete a session (add_v) in table t.
 * Writer-side algorithm: take the spin lock, make a per-cpu working copy
 * of the target bucket, mutate the copy, then atomically swing the bucket
 * back.  If the bucket is full, split_and_rehash doubles it and the add
 * is retried against the larger page set.  NOTE(review): several control
 * lines (is_add branch, unlock/return paths) are missing from this copy.
 */
377 int vnet_classify_add_del (vnet_classify_table_t * t,
378 vnet_classify_entry_t * add_v,
382 vnet_classify_bucket_t * b, tmp_b;
383 vnet_classify_entry_t * v, * new_v, * save_new_v, * working_copy, * save_v;
389 u32 cpu_number = os_get_cpu_number();
392 ASSERT ((add_v->flags & VNET_CLASSIFY_ENTRY_FREE) == 0);
/* Hash over the key as it would appear in a packet (undo the skip). */
394 key_minus_skip = (u8 *) add_v->key;
395 key_minus_skip -= t->skip_n_vectors * sizeof (u32x4);
397 hash = vnet_classify_hash_packet (t, key_minus_skip);
399 bucket_index = hash & (t->nbuckets-1);
400 b = &t->buckets[bucket_index];
402 hash >>= t->log2_nbuckets;
/* Single-writer spin lock. */
404 while (__sync_lock_test_and_set (t->writer_lock, 1))
407 /* First elt in the bucket? */
416 v = vnet_classify_entry_alloc (t, 0 /* new_log2_pages */);
417 clib_memcpy (v, add_v, sizeof (vnet_classify_entry_t) +
418 t->match_n_vectors * sizeof (u32x4));
419 v->flags &= ~(VNET_CLASSIFY_ENTRY_FREE);
422 tmp_b.offset = vnet_classify_get_offset (t, v);
424 b->as_u64 = tmp_b.as_u64;
425 t->active_elements ++;
/* Non-empty bucket: mutate a working copy, not the live entries. */
430 make_working_copy (t, b);
432 save_v = vnet_classify_get_entry (t, t->saved_bucket.offset);
433 value_index = hash & ((1<<t->saved_bucket.log2_pages)-1);
438 * For obvious (in hindsight) reasons, see if we're supposed to
439 * replace an existing key, then look for an empty slot.
/* Pass 1: replace an existing entry with a matching key. */
442 for (i = 0; i < t->entries_per_page; i++)
444 v = vnet_classify_entry_at_index (t, save_v, value_index + i);
446 if (!memcmp (v->key, add_v->key, t->match_n_vectors * sizeof (u32x4)))
448 clib_memcpy (v, add_v, sizeof (vnet_classify_entry_t) +
449 t->match_n_vectors * sizeof(u32x4));
450 v->flags &= ~(VNET_CLASSIFY_ENTRY_FREE);
452 CLIB_MEMORY_BARRIER();
453 /* Restore the previous (k,v) pairs */
454 b->as_u64 = t->saved_bucket.as_u64;
/* Pass 2: take the first free slot on the page. */
458 for (i = 0; i < t->entries_per_page; i++)
460 v = vnet_classify_entry_at_index (t, save_v, value_index + i);
462 if (vnet_classify_entry_is_free (v))
464 clib_memcpy (v, add_v, sizeof (vnet_classify_entry_t) +
465 t->match_n_vectors * sizeof(u32x4));
466 v->flags &= ~(VNET_CLASSIFY_ENTRY_FREE);
467 CLIB_MEMORY_BARRIER();
468 b->as_u64 = t->saved_bucket.as_u64;
469 t->active_elements ++;
473 /* no room at the inn... split case... */
/* Delete path: find the key, poison it with 0xff and mark it free. */
477 for (i = 0; i < t->entries_per_page; i++)
479 v = vnet_classify_entry_at_index (t, save_v, value_index + i);
481 if (!memcmp (v->key, add_v->key, t->match_n_vectors * sizeof (u32x4)))
483 memset (v, 0xff, sizeof (vnet_classify_entry_t) +
484 t->match_n_vectors * sizeof(u32x4));
485 v->flags |= VNET_CLASSIFY_ENTRY_FREE;
486 CLIB_MEMORY_BARRIER();
487 b->as_u64 = t->saved_bucket.as_u64;
488 t->active_elements --;
493 b->as_u64 = t->saved_bucket.as_u64;
/* Split: double the page count and rehash the working copy. */
497 new_log2_pages = t->saved_bucket.log2_pages + 1;
500 working_copy = t->working_copies[cpu_number];
501 new_v = split_and_rehash (t, working_copy, new_log2_pages);
509 /* Try to add the new entry */
512 key_minus_skip = (u8 *) add_v->key;
513 key_minus_skip -= t->skip_n_vectors * sizeof (u32x4);
515 new_hash = vnet_classify_hash_packet_inline (t, key_minus_skip);
516 new_hash >>= t->log2_nbuckets;
517 new_hash &= (1<<min_log2((vec_len(new_v)/t->entries_per_page))) - 1;
519 for (i = 0; i < t->entries_per_page; i++)
521 new_v = vnet_classify_entry_at_index (t, save_new_v, new_hash + i);
523 if (vnet_classify_entry_is_free (new_v))
525 clib_memcpy (new_v, add_v, sizeof (vnet_classify_entry_t) +
526 t->match_n_vectors * sizeof(u32x4));
527 new_v->flags &= ~(VNET_CLASSIFY_ENTRY_FREE);
531 /* Crap. Try again */
533 vnet_classify_entry_free (t, save_new_v);
/* Success: publish the new page set, then free the old one. */
537 tmp_b.log2_pages = min_log2 (vec_len (save_new_v)/t->entries_per_page);
538 tmp_b.offset = vnet_classify_get_offset (t, save_new_v);
539 CLIB_MEMORY_BARRIER();
540 b->as_u64 = tmp_b.as_u64;
541 t->active_elements ++;
542 v = vnet_classify_get_entry (t, t->saved_bucket.offset);
543 vnet_classify_entry_free (t, v);
/* Release the writer lock. */
546 CLIB_MEMORY_BARRIER();
547 t->writer_lock[0] = 0;
/* Packed overlay used to build masks/matches starting at the Ethernet
 * header.  NOTE(review): interior members appear missing in this copy. */
552 typedef CLIB_PACKED(struct {
553 ethernet_header_t eh;
555 }) classify_data_or_mask_t;
/* Out-of-line wrapper around the inline packet-hash routine. */
557 u64 vnet_classify_hash_packet (vnet_classify_table_t * t, u8 * h)
559 return vnet_classify_hash_packet_inline (t, h);
/* Out-of-line wrapper around the inline entry-lookup routine. */
562 vnet_classify_entry_t *
563 vnet_classify_find_entry (vnet_classify_table_t * t,
564 u8 * h, u64 hash, f64 now)
566 return vnet_classify_find_entry_inline (t, h, hash, now);
/* format() callback: render one classify entry (offsets, key, hit stats). */
569 static u8 * format_classify_entry (u8 * s, va_list * args)
571 vnet_classify_table_t * t = va_arg (*args, vnet_classify_table_t *);
572 vnet_classify_entry_t * e = va_arg (*args, vnet_classify_entry_t *);
575 (s, "[%u]: next_index %d advance %d opaque %d action %d metadata %d\n",
576 vnet_classify_get_offset (t, e), e->next_index, e->advance,
577 e->opaque_index, e->action, e->metadata);
580 s = format (s, " k: %U\n", format_hex_bytes, e->key,
581 t->match_n_vectors * sizeof(u32x4));
583 if (vnet_classify_entry_is_busy (e))
584 s = format (s, " hits %lld, last_heard %.2f\n",
585 e->hits, e->last_heard);
587 s = format (s, " entry is free\n");
/* format() callback: dump every bucket/page/entry of a table; verbose
 * controls whether free slots are printed too. */
591 u8 * format_classify_table (u8 * s, va_list * args)
593 vnet_classify_table_t * t = va_arg (*args, vnet_classify_table_t *);
594 int verbose = va_arg (*args, int);
595 vnet_classify_bucket_t * b;
596 vnet_classify_entry_t * v, * save_v;
598 u64 active_elements = 0;
600 for (i = 0; i < t->nbuckets; i++)
606 s = format (s, "[%d]: empty\n", i);
612 s = format (s, "[%d]: heap offset %d, len %d\n", i,
613 b->offset, (1<<b->log2_pages));
616 save_v = vnet_classify_get_entry (t, b->offset);
617 for (j = 0; j < (1<<b->log2_pages); j++)
619 for (k = 0; k < t->entries_per_page; k++)
622 v = vnet_classify_entry_at_index (t, save_v,
623 j*t->entries_per_page + k);
625 if (vnet_classify_entry_is_free (v))
628 s = format (s, " %d: empty\n",
629 j * t->entries_per_page + k);
634 s = format (s, " %d: %U\n",
635 j * t->entries_per_page + k,
636 format_classify_entry, t, v);
643 s = format (s, " %lld active elements\n", active_elements);
644 s = format (s, " %d free lists\n", vec_len (t->freelists));
/*
 * API-level add/update/delete of a classify table.
 * *table_index == ~0 means create a new table (validates memory_size,
 * returns VNET_API_ERROR_* on bad args); otherwise the existing table is
 * updated or deleted.  NOTE(review): the is_add parameter and several
 * branch lines are missing from this copy.
 */
648 int vnet_classify_add_del_table (vnet_classify_main_t * cm,
654 u32 next_table_index,
657 u8 current_data_flag,
658 i16 current_data_offset,
661 vnet_classify_table_t * t;
665 if (*table_index == ~0) /* add */
667 if (memory_size == 0)
668 return VNET_API_ERROR_INVALID_MEMORY_SIZE;
671 return VNET_API_ERROR_INVALID_VALUE;
673 t = vnet_classify_new_table (cm, mask, nbuckets, memory_size,
675 t->next_table_index = next_table_index;
676 t->miss_next_index = miss_next_index;
677 t->current_data_flag = current_data_flag;
678 t->current_data_offset = current_data_offset;
679 *table_index = t - cm->tables;
/* Update path: only the next-table chaining is changed here. */
683 vnet_classify_main_t *cm = &vnet_classify_main;
684 t = pool_elt_at_index (cm->tables, *table_index);
686 t->next_table_index = next_table_index;
/* Delete path. */
691 vnet_classify_delete_table_index (cm, *table_index);
/* Per-protocol field lists consumed by the unformat_*_mask functions
 * below.  NOTE(review): the continuation lines (field names) are missing
 * from this copy. */
695 #define foreach_tcp_proto_field \
699 #define foreach_udp_proto_field \
703 #define foreach_ip4_proto_field \
/* Parse "tcp ..." mask keywords; build a tcp_header_t mask with 0xff in
 * each requested field.  Returns via *maskp; fails if nothing matched. */
713 uword unformat_tcp_mask (unformat_input_t * input, va_list * args)
715 u8 ** maskp = va_arg (*args, u8 **);
717 u8 found_something = 0;
721 foreach_tcp_proto_field;
724 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
727 #define _(a) else if (unformat (input, #a)) a=1;
728 foreach_tcp_proto_field
734 #define _(a) found_something += a;
735 foreach_tcp_proto_field;
738 if (found_something == 0)
741 vec_validate (mask, sizeof (*tcp) - 1);
743 tcp = (tcp_header_t *) mask;
745 #define _(a) if (a) memset (&tcp->a, 0xff, sizeof (tcp->a));
746 foreach_tcp_proto_field;
/* Parse "udp ..." mask keywords; build a udp_header_t mask with 0xff in
 * each requested field.  Mirrors unformat_tcp_mask. */
753 uword unformat_udp_mask (unformat_input_t * input, va_list * args)
755 u8 ** maskp = va_arg (*args, u8 **);
757 u8 found_something = 0;
761 foreach_udp_proto_field;
764 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
767 #define _(a) else if (unformat (input, #a)) a=1;
768 foreach_udp_proto_field
774 #define _(a) found_something += a;
775 foreach_udp_proto_field;
778 if (found_something == 0)
781 vec_validate (mask, sizeof (*udp) - 1);
783 udp = (udp_header_t *) mask;
785 #define _(a) if (a) memset (&udp->a, 0xff, sizeof (udp->a));
786 foreach_udp_proto_field;
/* Overlay of the common TCP/UDP port fields (typedef truncated in copy). */
794 u16 src_port, dst_port;
/* Parse an L4 mask: delegates to the tcp/udp sub-parsers, or accepts
 * bare "src_port"/"dst_port" keywords and builds a port-only mask. */
797 uword unformat_l4_mask (unformat_input_t * input, va_list * args)
799 u8 ** maskp = va_arg (*args, u8 **);
800 u16 src_port = 0, dst_port = 0;
801 tcpudp_header_t * tcpudp;
803 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
805 if (unformat (input, "tcp %U", unformat_tcp_mask, maskp))
807 else if (unformat (input, "udp %U", unformat_udp_mask, maskp))
809 else if (unformat (input, "src_port"))
811 else if (unformat (input, "dst_port"))
817 if (!src_port && !dst_port)
821 vec_validate (mask, sizeof (tcpudp_header_t) - 1);
823 tcpudp = (tcpudp_header_t *) mask;
824 tcpudp->src_port = src_port;
825 tcpudp->dst_port = dst_port;
/* Parse an IPv4 header mask: named keywords (version, hdr_length, src,
 * dst, proto) plus the foreach_ip4_proto_field list; sets 0xff over each
 * selected field, with the version/header-length nibbles handled by hand. */
832 uword unformat_ip4_mask (unformat_input_t * input, va_list * args)
834 u8 ** maskp = va_arg (*args, u8 **);
836 u8 found_something = 0;
840 foreach_ip4_proto_field;
846 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
848 if (unformat (input, "version"))
850 else if (unformat (input, "hdr_length"))
852 else if (unformat (input, "src"))
854 else if (unformat (input, "dst"))
856 else if (unformat (input, "proto"))
859 #define _(a) else if (unformat (input, #a)) a=1;
860 foreach_ip4_proto_field
866 #define _(a) found_something += a;
867 foreach_ip4_proto_field;
870 if (found_something == 0)
873 vec_validate (mask, sizeof (*ip) - 1);
875 ip = (ip4_header_t *) mask;
877 #define _(a) if (a) memset (&ip->a, 0xff, sizeof (ip->a));
878 foreach_ip4_proto_field;
/* version lives in the high nibble, header length in the low nibble. */
881 ip->ip_version_and_header_length = 0;
884 ip->ip_version_and_header_length |= 0xF0;
887 ip->ip_version_and_header_length |= 0x0F;
/* Field list for the IPv6 mask parser (continuation lines missing in
 * this copy). */
893 #define foreach_ip6_proto_field \
/* Parse an IPv6 header mask; version/traffic-class/flow-label share one
 * 32-bit field, so their mask bits are assembled by hand below. */
900 uword unformat_ip6_mask (unformat_input_t * input, va_list * args)
902 u8 ** maskp = va_arg (*args, u8 **);
904 u8 found_something = 0;
906 u32 ip_version_traffic_class_and_flow_label;
909 foreach_ip6_proto_field;
912 u8 traffic_class = 0;
915 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
917 if (unformat (input, "version"))
919 else if (unformat (input, "traffic-class"))
921 else if (unformat (input, "flow-label"))
923 else if (unformat (input, "src"))
925 else if (unformat (input, "dst"))
927 else if (unformat (input, "proto"))
930 #define _(a) else if (unformat (input, #a)) a=1;
931 foreach_ip6_proto_field
937 #define _(a) found_something += a;
938 foreach_ip6_proto_field;
941 if (found_something == 0)
944 vec_validate (mask, sizeof (*ip) - 1);
946 ip = (ip6_header_t *) mask;
948 #define _(a) if (a) memset (&ip->a, 0xff, sizeof (ip->a));
949 foreach_ip6_proto_field;
/* version: 4 bits, traffic class: 8 bits, flow label: 20 bits. */
952 ip_version_traffic_class_and_flow_label = 0;
955 ip_version_traffic_class_and_flow_label |= 0xF0000000;
958 ip_version_traffic_class_and_flow_label |= 0x0FF00000;
961 ip_version_traffic_class_and_flow_label |= 0x000FFFFF;
963 ip->ip_version_traffic_class_and_flow_label =
964 clib_host_to_net_u32 (ip_version_traffic_class_and_flow_label);
/* Parse an L3 mask: dispatch to the ip4 or ip6 sub-parser. */
970 uword unformat_l3_mask (unformat_input_t * input, va_list * args)
972 u8 ** maskp = va_arg (*args, u8 **);
974 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
975 if (unformat (input, "ip4 %U", unformat_ip4_mask, maskp))
977 else if (unformat (input, "ip6 %U", unformat_ip6_mask, maskp))
/* Parse an L2 (Ethernet) mask: dst/src MACs, ethertype, and up to two
 * VLAN tags (tag value, ignore, cos, dot1q/dot1ad).  The hard-coded byte
 * offsets below place fields after 0, 1, or 2 VLAN tags. */
985 uword unformat_l2_mask (unformat_input_t * input, va_list * args)
987 u8 ** maskp = va_arg (*args, u8 **);
1002 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
1003 if (unformat (input, "src"))
1005 else if (unformat (input, "dst"))
1007 else if (unformat (input, "proto"))
1009 else if (unformat (input, "tag1"))
1011 else if (unformat (input, "tag2"))
1013 else if (unformat (input, "ignore-tag1"))
1015 else if (unformat (input, "ignore-tag2"))
1017 else if (unformat (input, "cos1"))
1019 else if (unformat (input, "cos2"))
1021 else if (unformat (input, "dot1q"))
1023 else if (unformat (input, "dot1ad"))
1028 if ((src + dst + proto + tag1 + tag2 + dot1q + dot1ad +
1029 ignore_tag1 + ignore_tag2 + cos1 + cos2) == 0)
/* Any tag-1 option implies one VLAN tag; any tag-2 option implies two. */
1032 if (tag1 || ignore_tag1 || cos1 || dot1q)
1034 if (tag2 || ignore_tag2 || cos2 || dot1ad)
1037 vec_validate (mask, len-1)
1040 memset (mask, 0xff, 6);
1043 memset (mask + 6, 0xff, 6);
1047 /* inner vlan tag */
1056 mask[21] = mask [20] = 0xff;
1077 mask[16] = mask [17] = 0xff;
1086 mask[12] = mask [13] = 0xff;
/*
 * Top-level mask parser for "classify table mask ...": accepts hex, l2,
 * l3, l4 sub-masks, concatenates them (an Ethernet header is prepended
 * when only l3/l4 were given), then computes the (skip, match) vector
 * counts: skip = leading all-zero u32x4s, match = trailing significant
 * u32x4s after padding to a vector multiple.
 */
1092 uword unformat_classify_mask (unformat_input_t * input, va_list * args)
1094 vnet_classify_main_t * CLIB_UNUSED(cm)
1095 = va_arg (*args, vnet_classify_main_t *);
1096 u8 ** maskp = va_arg (*args, u8 **);
1097 u32 * skipp = va_arg (*args, u32 *);
1098 u32 * matchp = va_arg (*args, u32 *);
1106 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
1107 if (unformat (input, "hex %U", unformat_hex_string, &mask))
1109 else if (unformat (input, "l2 %U", unformat_l2_mask, &l2))
1111 else if (unformat (input, "l3 %U", unformat_l3_mask, &l3))
1113 else if (unformat (input, "l4 %U", unformat_l4_mask, &l4))
1126 if (mask || l2 || l3 || l4)
1130 /* "With a free Ethernet header in every package" */
1132 vec_validate (l2, 13);
1136 vec_append (mask, l3);
1141 vec_append (mask, l4);
1146 /* Scan forward looking for the first significant mask octet */
1147 for (i = 0; i < vec_len (mask); i++)
1151 /* compute (skip, match) params */
1152 *skipp = i / sizeof(u32x4);
1153 vec_delete (mask, *skipp * sizeof(u32x4), 0);
1155 /* Pad mask to an even multiple of the vector size */
1156 while (vec_len (mask) % sizeof (u32x4))
1159 match = vec_len (mask) / sizeof (u32x4);
/* Trim trailing all-zero vectors off the match count. */
1161 for (i = match*sizeof(u32x4); i > 0; i-= sizeof(u32x4))
1163 u64 *tmp = (u64 *)(mask + (i-sizeof(u32x4)));
1164 if (*tmp || *(tmp+1))
1169 clib_warning ("BUG: match 0");
1171 _vec_len (mask) = match * sizeof(u32x4);
/* Next-node name list for the L2 input classify path (truncated in copy). */
1182 #define foreach_l2_input_next \
1184 _(ethernet, ETHERNET_INPUT) \
/* Parse an L2-input next index: registered hooks first, then the named
 * list above, then a bare number. */
1189 uword unformat_l2_input_next_index (unformat_input_t * input, va_list * args)
1191 vnet_classify_main_t * cm = &vnet_classify_main;
1192 u32 * miss_next_indexp = va_arg (*args, u32 *);
1197 /* First try registered unformat fns, allowing override... */
1198 for (i = 0; i < vec_len (cm->unformat_l2_next_index_fns); i++)
1200 if (unformat (input, "%U", cm->unformat_l2_next_index_fns[i], &tmp))
1208 if (unformat (input, #n)) { next_index = L2_INPUT_CLASSIFY_NEXT_##N; goto out;}
1209 foreach_l2_input_next;
1212 if (unformat (input, "%d", &tmp))
1221 *miss_next_indexp = next_index;
/* Next-node name list for the L2 output classify path (truncated in copy). */
1225 #define foreach_l2_output_next \
/* Parse an L2-output next index; same strategy as the input variant.
 * NOTE(review): it scans unformat_l2_next_index_fns — the same hook
 * vector as the input parser. */
1228 uword unformat_l2_output_next_index (unformat_input_t * input, va_list * args)
1230 vnet_classify_main_t * cm = &vnet_classify_main;
1231 u32 * miss_next_indexp = va_arg (*args, u32 *);
1236 /* First try registered unformat fns, allowing override... */
1237 for (i = 0; i < vec_len (cm->unformat_l2_next_index_fns); i++)
1239 if (unformat (input, "%U", cm->unformat_l2_next_index_fns[i], &tmp))
1247 if (unformat (input, #n)) { next_index = L2_OUTPUT_CLASSIFY_NEXT_##N; goto out;}
1248 foreach_l2_output_next;
1251 if (unformat (input, "%d", &tmp))
1260 *miss_next_indexp = next_index;
/* Next-node name list for the IP classify path (truncated in copy). */
1264 #define foreach_ip_next \
/* Parse an IP next index: registered hooks, named list, or bare number. */
1268 uword unformat_ip_next_index (unformat_input_t * input, va_list * args)
1270 u32 * miss_next_indexp = va_arg (*args, u32 *);
1271 vnet_classify_main_t * cm = &vnet_classify_main;
1276 /* First try registered unformat fns, allowing override... */
1277 for (i = 0; i < vec_len (cm->unformat_ip_next_index_fns); i++)
1279 if (unformat (input, "%U", cm->unformat_ip_next_index_fns[i], &tmp))
1287 if (unformat (input, #n)) { next_index = IP_LOOKUP_NEXT_##N; goto out;}
1291 if (unformat (input, "%d", &tmp))
1300 *miss_next_indexp = next_index;
/* Next-node name list for the ACL classify path (truncated in copy). */
1304 #define foreach_acl_next \
/* Parse an ACL next index: registered hooks, named list, "permit", or a
 * bare number. */
1307 uword unformat_acl_next_index (unformat_input_t * input, va_list * args)
1309 u32 * next_indexp = va_arg (*args, u32 *);
1310 vnet_classify_main_t * cm = &vnet_classify_main;
1315 /* First try registered unformat fns, allowing override... */
1316 for (i = 0; i < vec_len (cm->unformat_acl_next_index_fns); i++)
1318 if (unformat (input, "%U", cm->unformat_acl_next_index_fns[i], &tmp))
1326 if (unformat (input, #n)) { next_index = ACL_NEXT_INDEX_##N; goto out;}
1330 if (unformat (input, "permit"))
1335 else if (unformat (input, "%d", &tmp))
1344 *next_indexp = next_index;
/* Parse a policer next index: registered hooks first, then a bare number. */
1348 uword unformat_policer_next_index (unformat_input_t * input, va_list * args)
1350 u32 * next_indexp = va_arg (*args, u32 *);
1351 vnet_classify_main_t * cm = &vnet_classify_main;
1356 /* First try registered unformat fns, allowing override... */
1357 for (i = 0; i < vec_len (cm->unformat_policer_next_index_fns); i++)
1359 if (unformat (input, "%U", cm->unformat_policer_next_index_fns[i], &tmp))
1366 if (unformat (input, "%d", &tmp))
1375 *next_indexp = next_index;
/*
 * CLI handler for "classify table": parses table parameters (mask, skip,
 * match, buckets, memory size, miss/next indices, current-data options),
 * validates required arguments, and calls vnet_classify_add_del_table.
 * NOTE(review): "¤t_data_flag" / "¤t_data_offset" below are
 * mis-encoded "&current_data_flag" / "&current_data_offset" — an HTML
 * entity (&curren) decoding artifact in this copy; fix against upstream.
 */
1379 static clib_error_t *
1380 classify_table_command_fn (vlib_main_t * vm,
1381 unformat_input_t * input,
1382 vlib_cli_command_t * cmd)
1388 u32 table_index = ~0;
1389 u32 next_table_index = ~0;
1390 u32 miss_next_index = ~0;
1391 u32 memory_size = 2<<20;
1393 u32 current_data_flag = 0;
1394 int current_data_offset = 0;
1397 vnet_classify_main_t * cm = &vnet_classify_main;
1400 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
1401 if (unformat (input, "del"))
1403 else if (unformat (input, "buckets %d", &nbuckets))
1405 else if (unformat (input, "skip %d", &skip))
1407 else if (unformat (input, "match %d", &match))
1409 else if (unformat (input, "table %d", &table_index))
1411 else if (unformat (input, "mask %U", unformat_classify_mask,
1412 cm, &mask, &skip, &match))
1414 else if (unformat (input, "memory-size %uM", &tmp))
1415 memory_size = tmp<<20;
1416 else if (unformat (input, "memory-size %uG", &tmp))
1417 memory_size = tmp<<30;
1418 else if (unformat (input, "next-table %d", &next_table_index))
1420 else if (unformat (input, "miss-next %U", unformat_ip_next_index,
1423 else if (unformat (input, "l2-input-miss-next %U", unformat_l2_input_next_index,
1426 else if (unformat (input, "l2-output-miss-next %U", unformat_l2_output_next_index,
1429 else if (unformat (input, "acl-miss-next %U", unformat_acl_next_index,
1432 else if (unformat (input, "current-data-flag %d", ¤t_data_flag))
1434 else if (unformat (input, "current-data-offset %d", ¤t_data_offset))
/* Argument validation: mask/skip/match required on add, table on del. */
1441 if (is_add && mask == 0 && table_index == ~0)
1442 return clib_error_return (0, "Mask required");
1444 if (is_add && skip == ~0 && table_index == ~0)
1445 return clib_error_return (0, "skip count required");
1447 if (is_add && match == ~0 && table_index == ~0)
1448 return clib_error_return (0, "match count required");
1450 if (!is_add && table_index == ~0)
1451 return clib_error_return (0, "table index required for delete");
1453 rv = vnet_classify_add_del_table (cm, mask, nbuckets, memory_size,
1454 skip, match, next_table_index, miss_next_index,
1455 &table_index, current_data_flag, current_data_offset, is_add);
1462 return clib_error_return (0, "vnet_classify_add_del_table returned %d",
/* CLI registration for "classify table". */
1468 VLIB_CLI_COMMAND (classify_table, static) = {
1469 .path = "classify table",
1471 "classify table [miss-next|l2-miss_next|acl-miss-next <next_index>]"
1472 "\n mask <mask-value> buckets <nn> [skip <n>] [match <n>]"
1473 "\n [current-data-flag <n>] [current-data-offset <n>] [table <n>] [del]",
1474 .function = classify_table_command_fn,
/* format() callback: one summary row per table; index == ~0 prints the
 * column header.  With verbose, also dumps heap info, parameters, mask,
 * and the full per-bucket contents via format_classify_table. */
1477 static u8 * format_vnet_classify_table (u8 * s, va_list * args)
1479 vnet_classify_main_t * cm = va_arg (*args, vnet_classify_main_t *);
1480 int verbose = va_arg (*args, int);
1481 u32 index = va_arg (*args, u32);
1482 vnet_classify_table_t * t;
1486 s = format (s, "%10s%10s%10s%10s", "TableIdx", "Sessions", "NextTbl",
1487 "NextNode", verbose ? "Details" : "");
1491 t = pool_elt_at_index (cm->tables, index);
1492 s = format (s, "%10u%10d%10d%10d", index, t->active_elements,
1493 t->next_table_index, t->miss_next_index);
1495 s = format (s, "\n Heap: %U", format_mheap, t->mheap, 0 /*verbose*/);
1497 s = format (s, "\n nbuckets %d, skip %d match %d flag %d offset %d",
1498 t->nbuckets, t->skip_n_vectors, t->match_n_vectors,
1499 t->current_data_flag, t->current_data_offset);
1500 s = format (s, "\n mask %U", format_hex_bytes, t->mask,
1501 t->match_n_vectors * sizeof (u32x4));
1506 s = format (s, "\n%U", format_classify_table, t, verbose);
/* CLI handler for "show classify tables [index <n>] [verbose]": collect
 * matching table indices, then print a header row plus one row each. */
1511 static clib_error_t *
1512 show_classify_tables_command_fn (vlib_main_t * vm,
1513 unformat_input_t * input,
1514 vlib_cli_command_t * cmd)
1516 vnet_classify_main_t * cm = &vnet_classify_main;
1517 vnet_classify_table_t * t;
1518 u32 match_index = ~0;
1523 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
1525 if (unformat (input, "index %d", &match_index))
1527 else if (unformat (input, "verbose %d", &verbose))
1529 else if (unformat (input, "verbose"))
1535 pool_foreach (t, cm->tables,
1537 if (match_index == ~0 || (match_index == t - cm->tables))
1538 vec_add1 (indices, t - cm->tables);
1541 if (vec_len(indices))
/* First call with index ~0 prints the column header. */
1543 vlib_cli_output (vm, "%U", format_vnet_classify_table, cm, verbose,
1545 for (i = 0; i < vec_len (indices); i++)
1546 vlib_cli_output (vm, "%U", format_vnet_classify_table, cm,
1547 verbose, indices[i]);
1550 vlib_cli_output (vm, "No classifier tables configured");
/* CLI registration for "show classify tables". */
1557 VLIB_CLI_COMMAND (show_classify_table_command, static) = {
1558 .path = "show classify tables",
1559 .short_help = "show classify tables [index <nn>]",
1560 .function = show_classify_tables_command_fn,
/* Parse an L4 match value ("src_port <n>" / "dst_port <n>") and build a
 * network-byte-order tcpudp header blob in *matchp. */
1563 uword unformat_l4_match (unformat_input_t * input, va_list * args)
1565 u8 ** matchp = va_arg (*args, u8 **);
1567 u8 * proto_header = 0;
1573 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
1575 if (unformat (input, "src_port %d", &src_port))
1577 else if (unformat (input, "dst_port %d", &dst_port))
1583 h.src_port = clib_host_to_net_u16(src_port);
1584 h.dst_port = clib_host_to_net_u16(dst_port);
1585 vec_validate(proto_header, sizeof(h)-1);
1586 memcpy(proto_header, &h, sizeof(h));
1588 *matchp = proto_header;
/* Parse concrete IPv4 match values (src/dst address, proto, tos, length,
 * fragment_id, ttl, checksum, version, hdr_length) into a u32x4-aligned
 * ip4_header_t blob suitable for the vector compare routines. */
1593 uword unformat_ip4_match (unformat_input_t * input, va_list * args)
1595 u8 ** matchp = va_arg (*args, u8 **);
1602 int src = 0, dst = 0;
1603 ip4_address_t src_val, dst_val;
1610 int fragment_id = 0;
1611 u32 fragment_id_val;
1617 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
1619 if (unformat (input, "version %d", &version_val))
1621 else if (unformat (input, "hdr_length %d", &hdr_length_val))
1623 else if (unformat (input, "src %U", unformat_ip4_address, &src_val))
1625 else if (unformat (input, "dst %U", unformat_ip4_address, &dst_val))
1627 else if (unformat (input, "proto %d", &proto_val))
1629 else if (unformat (input, "tos %d", &tos_val))
1631 else if (unformat (input, "length %d", &length_val))
1633 else if (unformat (input, "fragment_id %d", &fragment_id_val))
1635 else if (unformat (input, "ttl %d", &ttl_val))
1637 else if (unformat (input, "checksum %d", &checksum_val))
1643 if (version + hdr_length + src + dst + proto + tos + length + fragment_id
1644 + ttl + checksum == 0)
1648 * Aligned because we use the real comparison functions
1650 vec_validate_aligned (match, sizeof (*ip) - 1, sizeof(u32x4));
1652 ip = (ip4_header_t *) match;
1654 /* These are realistically matched in practice */
1656 ip->src_address.as_u32 = src_val.as_u32;
1659 ip->dst_address.as_u32 = dst_val.as_u32;
1662 ip->protocol = proto_val;
1665 /* These are not, but they're included for completeness */
1667 ip->ip_version_and_header_length |= (version_val & 0xF)<<4;
1670 ip->ip_version_and_header_length |= (hdr_length_val & 0xF);
1676 ip->length = clib_host_to_net_u16 (length_val);
1682 ip->checksum = clib_host_to_net_u16 (checksum_val);
/* Parse concrete IPv6 match values (src/dst address, proto, traffic
 * class, flow label, payload length, hop limit, version) into a
 * u32x4-aligned ip6_header_t blob; the version/tc/flow-label word is
 * assembled by hand and stored in network byte order. */
1688 uword unformat_ip6_match (unformat_input_t * input, va_list * args)
1690 u8 ** matchp = va_arg (*args, u8 **);
1695 u8 traffic_class = 0;
1696 u32 traffic_class_val;
1699 int src = 0, dst = 0;
1700 ip6_address_t src_val, dst_val;
1703 int payload_length = 0;
1704 u32 payload_length_val;
1707 u32 ip_version_traffic_class_and_flow_label;
1709 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
1711 if (unformat (input, "version %d", &version_val))
1713 else if (unformat (input, "traffic_class %d", &traffic_class_val))
1715 else if (unformat (input, "flow_label %d", &flow_label_val))
1717 else if (unformat (input, "src %U", unformat_ip6_address, &src_val))
1719 else if (unformat (input, "dst %U", unformat_ip6_address, &dst_val))
1721 else if (unformat (input, "proto %d", &proto_val))
1723 else if (unformat (input, "payload_length %d", &payload_length_val))
1725 else if (unformat (input, "hop_limit %d", &hop_limit_val))
1731 if (version + traffic_class + flow_label + src + dst + proto +
1732 payload_length + hop_limit == 0)
1736 * Aligned because we use the real comparison functions
1738 vec_validate_aligned (match, sizeof (*ip) - 1, sizeof(u32x4));
1740 ip = (ip6_header_t *) match;
1743 clib_memcpy (&ip->src_address, &src_val, sizeof (ip->src_address));
1746 clib_memcpy (&ip->dst_address, &dst_val, sizeof (ip->dst_address));
1749 ip->protocol = proto_val;
/* version: bits 31-28, traffic class: 27-20, flow label: 19-0. */
1751 ip_version_traffic_class_and_flow_label = 0;
1754 ip_version_traffic_class_and_flow_label |= (version_val & 0xF) << 28;
1757 ip_version_traffic_class_and_flow_label |= (traffic_class_val & 0xFF) << 20;
1760 ip_version_traffic_class_and_flow_label |= (flow_label_val & 0xFFFFF);
1762 ip->ip_version_traffic_class_and_flow_label =
1763 clib_host_to_net_u32 (ip_version_traffic_class_and_flow_label);
1766 ip->payload_length = clib_host_to_net_u16 (payload_length_val);
1769 ip->hop_limit = hop_limit_val;
/*
 * unformat_l3_match
 * Dispatch an L3 match spec to the protocol-specific parser:
 * "ip4 ..." -> unformat_ip4_match, "ip6 ..." -> unformat_ip6_match.
 * The chosen sub-parser fills *matchp with the match byte vector.
 */
1775 uword unformat_l3_match (unformat_input_t * input, va_list * args)
1777 u8 ** matchp = va_arg (*args, u8 **);
1779 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
1780 if (unformat (input, "ip4 %U", unformat_ip4_match, matchp))
1782 else if (unformat (input, "ip6 %U", unformat_ip6_match, matchp))
/*
 * unformat_vlan_tag
 * Parse a decimal VLAN ID into a 2-byte, network-order tag buffer.
 * Only the 12-bit VLAN ID is stored (upper nibble masked with 0x0F);
 * the PCP/CoS bits are set separately by the "cos1"/"cos2" keywords
 * in unformat_l2_match.
 */
1791 uword unformat_vlan_tag (unformat_input_t * input, va_list * args)
1793 u8 * tagp = va_arg (*args, u8 *);
1796 if (unformat(input, "%d", &tag))
1798 tagp[0] = (tag>>8) & 0x0F;
1799 tagp[1] = tag & 0xFF;
/*
 * unformat_l2_match
 * Parse an L2 (ethernet) classifier match spec into a byte vector
 * shaped like an ethernet header, returned via *matchp.
 * The vector length depends on how many VLAN tags are involved:
 * 14 bytes untagged, 18 with one tag, 22 with two tags — byte
 * offsets below index into that layout (dst@0, src@6, tags/ethertype
 * following).
 *
 * NOTE(review): this capture appears to be missing physical lines
 * (declarations, braces, flag assignments, len computation) relative
 * to the upstream source — verify before modifying.
 */
1808 uword unformat_l2_match (unformat_input_t * input, va_list * args)
1810 u8 ** matchp = va_arg (*args, u8 **);
/* Parse user keywords; each presumably sets a "field present" flag. */
1828 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
1829 if (unformat (input, "src %U", unformat_ethernet_address, &src_val))
1831 else if (unformat (input, "dst %U", unformat_ethernet_address, &dst_val))
1833 else if (unformat (input, "proto %U",
1834 unformat_ethernet_type_host_byte_order, &proto_val))
1836 else if (unformat (input, "tag1 %U", unformat_vlan_tag, tag1_val))
1838 else if (unformat (input, "tag2 %U", unformat_vlan_tag, tag2_val))
1840 else if (unformat (input, "ignore-tag1"))
1842 else if (unformat (input, "ignore-tag2"))
1844 else if (unformat (input, "cos1 %d", &cos1_val))
1846 else if (unformat (input, "cos2 %d", &cos2_val))
/* Reject an empty match: at least one field must have been given. */
1851 if ((src + dst + proto + tag1 + tag2 +
1852 ignore_tag1 + ignore_tag2 + cos1 + cos2) == 0)
/* Header length grows by 4 bytes per VLAN tag referenced. */
1855 if (tag1 || ignore_tag1 || cos1)
1857 if (tag2 || ignore_tag2 || cos2)
1860 vec_validate_aligned (match, len-1, sizeof(u32x4));
/* dst MAC at offset 0, src MAC at offset 6 (wire order). */
1863 clib_memcpy (match, dst_val, 6);
1866 clib_memcpy (match + 6, src_val, 6);
/* Double-tagged case: inner (tag2) VID at 18..19, CoS in the top
 * 3 bits of byte 18, ethertype at 20..21. */
1870 /* inner vlan tag */
1871 match[19] = tag2_val[1];
1872 match[18] = tag2_val[0];
1874 match [18] |= (cos2_val & 0x7) << 5;
1877 match[21] = proto_val & 0xff;
1878 match[20] = proto_val >> 8;
/* Outer (tag1) VID at 14..15, CoS in the top 3 bits of byte 14. */
1882 match [15] = tag1_val[1];
1883 match [14] = tag1_val[0];
1886 match [14] |= (cos1_val & 0x7) << 5;
/* Single-tagged case: tag1 at 14..15, ethertype at 16..17. */
1892 match [15] = tag1_val[1];
1893 match [14] = tag1_val[0];
1896 match[17] = proto_val & 0xff;
1897 match[16] = proto_val >> 8;
1900 match [14] |= (cos1_val & 0x7) << 5;
1906 match [18] |= (cos2_val & 0x7) << 5;
1908 match [14] |= (cos1_val & 0x7) << 5;
/* Untagged case: ethertype directly at 12..13. */
1911 match[13] = proto_val & 0xff;
1912 match[12] = proto_val >> 8;
/*
 * unformat_classify_match
 * Top-level match parser for "classify session ... match ...".
 * Accepts raw hex, and/or l2/l3/l4 sub-specs, concatenates them in
 * packet order (l2 | l3 | l4), then pads/truncates the result to
 * exactly (match_n_vectors + skip_n_vectors) * 16 bytes as required
 * by table t.
 *
 * NOTE(review): this capture appears to be missing physical lines
 * (declarations, braces, returns) relative to the upstream source.
 */
1922 uword unformat_classify_match (unformat_input_t * input, va_list * args)
1923 vnet_classify_main_t * cm = va_arg (*args, vnet_classify_main_t *);
1924 u8 ** matchp = va_arg (*args, u8 **);
1925 u32 table_index = va_arg (*args, u32);
1926 vnet_classify_table_t * t;
/* The table must already exist: its geometry dictates the key size. */
1932 if (pool_is_free_index (cm->tables, table_index))
1935 t = pool_elt_at_index (cm->tables, table_index);
1937 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
1938 if (unformat (input, "hex %U", unformat_hex_string, &match))
1940 else if (unformat (input, "l2 %U", unformat_l2_match, &l2))
1942 else if (unformat (input, "l3 %U", unformat_l3_match, &l3))
1944 else if (unformat (input, "l4 %U", unformat_l4_match, &l4))
1957 if (match || l2 || l3 || l4)
/* If only l3/l4 data was given, prepend a zeroed 14-byte ethernet
 * header so offsets line up with a real packet. */
1961 /* "Win a free Ethernet header in every packet" */
1963 vec_validate_aligned (l2, 13, sizeof(u32x4));
1967 vec_append_aligned (match, l3, sizeof(u32x4));
1972 vec_append_aligned (match, l4, sizeof(u32x4));
1977 /* Make sure the vector is big enough even if key is all 0's */
1978 vec_validate_aligned
1979 (match, ((t->match_n_vectors + t->skip_n_vectors) * sizeof(u32x4)) - 1,
1982 /* Set size, include skipped vectors*/
1983 _vec_len (match) = (t->match_n_vectors+t->skip_n_vectors) * sizeof(u32x4);
/*
 * vnet_classify_add_del_session
 * Add or delete one session (entry) in classifier table table_index.
 * Builds a maximum-sized entry on the stack (16-byte aligned for the
 * vector-unit key copy), fills in the result fields, copies the key
 * from `match` honoring the table's skip_n_vectors, masks off
 * don't-care bits, then calls vnet_classify_add_del.
 * Returns 0 on success or a VNET_API_ERROR_* code.
 *
 * NOTE(review): this capture appears to be missing physical lines
 * (full parameter list, the "if (rv)" guard before the error return,
 * the success return) relative to the upstream source.
 */
1993 int vnet_classify_add_del_session (vnet_classify_main_t * cm,
2003 vnet_classify_table_t * t;
2004 vnet_classify_entry_5_t _max_e __attribute__((aligned (16)));
2005 vnet_classify_entry_t * e;
2008 if (pool_is_free_index (cm->tables, table_index))
2009 return VNET_API_ERROR_NO_SUCH_TABLE;
2011 t = pool_elt_at_index (cm->tables, table_index);
2013 e = (vnet_classify_entry_t *)&_max_e;
2014 e->next_index = hit_next_index;
2015 e->opaque_index = opaque_index;
2016 e->advance = advance;
/* For fib-id actions, resolve the user-supplied table id to a FIB
 * index, creating and locking the FIB if needed. */
2021 if (e->action == CLASSIFY_ACTION_SET_IP4_FIB_INDEX)
2022 e->metadata = fib_table_find_or_create_and_lock (FIB_PROTOCOL_IP4, metadata);
2023 else if (e->action == CLASSIFY_ACTION_SET_IP6_FIB_INDEX)
2024 e->metadata = fib_table_find_or_create_and_lock (FIB_PROTOCOL_IP6, metadata);
2026 /* Copy key data, honoring skip_n_vectors */
2027 clib_memcpy (&e->key, match + t->skip_n_vectors * sizeof (u32x4),
2028 t->match_n_vectors * sizeof (u32x4));
2030 /* Clear don't-care bits; likely when dynamically creating sessions */
2031 for (i = 0; i < t->match_n_vectors; i++)
2032 e->key[i] &= t->mask[i];
2034 rv = vnet_classify_add_del (t, e, is_add);
/* NOTE(review): upstream guards this with "if (rv)"; the guard line
 * is not visible in this capture. */
2036 return VNET_API_ERROR_NO_SUCH_ENTRY;
/*
 * classify_session_command_fn
 * CLI handler for "classify session": parses the hit-next node,
 * opaque index, match data, advance, table index, and optional
 * fib-id actions, then calls vnet_classify_add_del_session.
 *
 * NOTE(review): this capture appears to be missing physical lines
 * (declarations, braces, flag assignments) relative to the upstream
 * source.
 */
2040 static clib_error_t *
2041 classify_session_command_fn (vlib_main_t * vm,
2042 unformat_input_t * input,
2043 vlib_cli_command_t * cmd)
2045 vnet_classify_main_t * cm = &vnet_classify_main;
/* ~0 sentinels: "not supplied yet". table-index is mandatory. */
2047 u32 table_index = ~0;
2048 u32 hit_next_index = ~0;
2049 u64 opaque_index = ~0;
2056 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
2058 if (unformat (input, "del"))
2060 else if (unformat (input, "hit-next %U", unformat_ip_next_index,
2063 else if (unformat (input, "l2-input-hit-next %U", unformat_l2_input_next_index,
2066 else if (unformat (input, "l2-output-hit-next %U", unformat_l2_output_next_index,
2069 else if (unformat (input, "acl-hit-next %U", unformat_acl_next_index,
2072 else if (unformat (input, "policer-hit-next %U",
2073 unformat_policer_next_index, &hit_next_index))
2075 else if (unformat (input, "opaque-index %lld", &opaque_index))
2077 else if (unformat (input, "match %U", unformat_classify_match,
2078 cm, &match, table_index))
2080 else if (unformat (input, "advance %d", &advance))
2082 else if (unformat (input, "table-index %d", &table_index))
2084 else if (unformat (input, "action set-ip4-fib-id %d", &metadata))
2086 else if (unformat (input, "action set-ip6-fib-id %d", &metadata))
2090 /* Try registered opaque-index unformat fns */
2091 for (i = 0; i < vec_len (cm->unformat_opaque_index_fns); i++)
2093 if (unformat (input, "%U", cm->unformat_opaque_index_fns[i],
/* Validate mandatory arguments before touching the table. */
2103 if (table_index == ~0)
2104 return clib_error_return (0, "Table index required");
2106 if (is_add && match == 0)
2107 return clib_error_return (0, "Match value required");
2109 rv = vnet_classify_add_del_session (cm, table_index, match,
2111 opaque_index, advance,
2112 action, metadata, is_add);
2120 return clib_error_return (0, "vnet_classify_add_del_session returned %d",
/* CLI registration for "classify session" — wires the handler above
 * into the vlib CLI tree with its help string. */
2127 VLIB_CLI_COMMAND (classify_session_command, static) = {
2128 .path = "classify session",
2130 "classify session [hit-next|l2-hit-next|"
2131 "acl-hit-next <next_index>|policer-hit-next <policer_name>]"
2132 "\n table-index <nn> match [hex] [l2] [l3 ip4] [opaque-index <index>]"
2133 "\n [action set-ip4-fib-id <n>] [action set-ip6-fib-id <n>] [del]",
2134 .function = classify_session_command_fn,
/*
 * unformat_opaque_sw_if_index
 * Registered opaque-index parser: accepts
 * "opaque-sw_if_index <interface-name>" and stores the resolved
 * sw_if_index into the u64 opaque value.
 */
2138 unformat_opaque_sw_if_index (unformat_input_t * input, va_list * args)
2140 u64 * opaquep = va_arg (*args, u64 *);
2143 if (unformat (input, "opaque-sw_if_index %U", unformat_vnet_sw_interface,
2144 vnet_get_main(), &sw_if_index))
2146 *opaquep = sw_if_index;
/*
 * unformat_ip_next_node
 * Parse "ip4-node <name>" / "ip6-node <name>" and add the named node
 * as a next-node arc from the corresponding ip classify node,
 * returning the newly allocated next_index via *next_indexp.
 */
2153 unformat_ip_next_node (unformat_input_t * input, va_list * args)
2155 vnet_classify_main_t * cm = &vnet_classify_main;
2156 u32 * next_indexp = va_arg (*args, u32 *);
2158 u32 next_index = ~0;
2160 if (unformat (input, "ip6-node %U", unformat_vlib_node,
2161 cm->vlib_main, &node_index))
2163 next_index = vlib_node_add_next (cm->vlib_main,
2164 ip6_classify_node.index, node_index);
2166 else if (unformat (input, "ip4-node %U", unformat_vlib_node,
2167 cm->vlib_main, &node_index))
2169 next_index = vlib_node_add_next (cm->vlib_main,
2170 ip4_classify_node.index, node_index);
2175 *next_indexp = next_index;
/*
 * unformat_acl_next_node
 * Like unformat_ip_next_node, but the new arc originates from the
 * input-ACL nodes (ip4_inacl / ip6_inacl) instead of the classify
 * nodes.
 */
2180 unformat_acl_next_node (unformat_input_t * input, va_list * args)
2182 vnet_classify_main_t * cm = &vnet_classify_main;
2183 u32 * next_indexp = va_arg (*args, u32 *);
2187 if (unformat (input, "ip6-node %U", unformat_vlib_node,
2188 cm->vlib_main, &node_index))
2190 next_index = vlib_node_add_next (cm->vlib_main,
2191 ip6_inacl_node.index, node_index);
2193 else if (unformat (input, "ip4-node %U", unformat_vlib_node,
2194 cm->vlib_main, &node_index))
2196 next_index = vlib_node_add_next (cm->vlib_main,
2197 ip4_inacl_node.index, node_index);
2202 *next_indexp = next_index;
/*
 * unformat_l2_input_next_node
 * Parse "input-node <name>" and add that node as a next-node arc
 * from the l2 input classify node; returns the next_index via
 * *next_indexp.
 */
2207 unformat_l2_input_next_node (unformat_input_t * input, va_list * args)
2209 vnet_classify_main_t * cm = &vnet_classify_main;
2210 u32 * next_indexp = va_arg (*args, u32 *);
2214 if (unformat (input, "input-node %U", unformat_vlib_node,
2215 cm->vlib_main, &node_index))
2217 next_index = vlib_node_add_next
2218 (cm->vlib_main, l2_input_classify_node.index, node_index);
2220 *next_indexp = next_index;
/*
 * unformat_l2_output_next_node
 * Parse "output-node <name>" and add that node as a next-node arc
 * from the l2 output classify node; returns the next_index via
 * *next_indexp.
 */
2227 unformat_l2_output_next_node (unformat_input_t * input, va_list * args)
2229 vnet_classify_main_t * cm = &vnet_classify_main;
2230 u32 * next_indexp = va_arg (*args, u32 *);
2234 if (unformat (input, "output-node %U", unformat_vlib_node,
2235 cm->vlib_main, &node_index))
2237 next_index = vlib_node_add_next
2238 (cm->vlib_main, l2_output_classify_node.index, node_index);
2240 *next_indexp = next_index;
/*
 * vnet_classify_init
 * vlib init function: caches the vnet main pointer and registers the
 * built-in unformat helpers (opaque index, ip/acl/l2 next-index
 * parsers) with the classifier main struct so CLI/API parsing can
 * find them.
 */
2246 static clib_error_t *
2247 vnet_classify_init (vlib_main_t * vm)
2249 vnet_classify_main_t * cm = &vnet_classify_main;
2252 cm->vnet_main = vnet_get_main();
2254 vnet_classify_register_unformat_opaque_index_fn
2255 (unformat_opaque_sw_if_index);
2257 vnet_classify_register_unformat_ip_next_index_fn
2258 (unformat_ip_next_node);
2260 vnet_classify_register_unformat_l2_next_index_fn
2261 (unformat_l2_input_next_node);
2263 vnet_classify_register_unformat_l2_next_index_fn
2264 (unformat_l2_output_next_node);
2266 vnet_classify_register_unformat_acl_next_index_fn
2267 (unformat_acl_next_node);
/* Run at vlib startup, before CLI/API traffic arrives. */
2272 VLIB_INIT_FUNCTION (vnet_classify_init);
/*
 * test_classify_command_fn
 * Developer CLI "test classify" (compiled under TEST_CODE): builds a
 * table matching on the IPv4 source address, adds `sessions`
 * consecutive-address sessions starting at `src`, and on "del"
 * looks each one up and deletes it, reporting mismatches.
 *
 * NOTE(review): this capture appears to be missing physical lines
 * (declarations, braces, the add/del mode split) relative to the
 * upstream source.
 */
2277 static clib_error_t *
2278 test_classify_command_fn (vlib_main_t * vm,
2279 unformat_input_t * input,
2280 vlib_cli_command_t * cmd)
2285 vnet_classify_table_t * t = 0;
2286 classify_data_or_mask_t * mask;
2287 classify_data_or_mask_t * data;
2288 u8 *mp = 0, *dp = 0;
2289 vnet_classify_main_t * cm = &vnet_classify_main;
2290 vnet_classify_entry_t * e;
2293 u32 table_index = ~0;
2296 u32 memory_size = 64<<20;
2298 /* Default starting address 1.0.0.10 */
2299 src.as_u32 = clib_net_to_host_u32 (0x0100000A);
2301 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
2302 if (unformat (input, "sessions %d", &sessions))
2304 else if (unformat (input, "src %U", unformat_ip4_address, &src))
2306 else if (unformat (input, "buckets %d", &buckets))
2308 else if (unformat (input, "memory-size %uM", &tmp))
2309 memory_size = tmp<<20;
2310 else if (unformat (input, "memory-size %uG", &tmp))
2311 memory_size = tmp<<30;
2312 else if (unformat (input, "del"))
2314 else if (unformat (input, "table %d", &table_index))
/* Build 3-vector (48-byte) mask and data templates covering an
 * ethernet + ip4 header prefix. */
2320 vec_validate_aligned (mp, 3 * sizeof(u32x4), sizeof(u32x4));
2321 vec_validate_aligned (dp, 3 * sizeof(u32x4), sizeof(u32x4));
2323 mask = (classify_data_or_mask_t *) mp;
2324 data = (classify_data_or_mask_t *) dp;
2326 data->ip.src_address.as_u32 = src.as_u32;
2328 /* Mask on src address */
2329 memset (&mask->ip.src_address, 0xff, 4);
/* Bucket count must be a power of two for the hash table. */
2331 buckets = 1<<max_log2(buckets);
2333 if (table_index != ~0)
2335 if (pool_is_free_index (cm->tables, table_index))
2337 vlib_cli_output (vm, "No such table %d", table_index);
2340 t = pool_elt_at_index (cm->tables, table_index);
2347 t = vnet_classify_new_table (cm, (u8 *)mask, buckets,
2350 3 /* vectors to match */);
2351 t->miss_next_index = IP_LOOKUP_NEXT_DROP;
2352 vlib_cli_output (vm, "Create table %d", t - cm->tables);
2355 vlib_cli_output (vm, "Add %d sessions to %d buckets...",
/* Add mode: one session per consecutive source address. */
2358 for (i = 0; i < sessions; i++)
2360 rv = vnet_classify_add_del_session (cm, t - cm->tables, (u8 *) data,
2361 IP_LOOKUP_NEXT_DROP,
2362 i+100 /* opaque_index */,
2363 0 /* advance */, 0, 0,
2367 clib_warning ("add: returned %d", rv);
/* Advance to the next source address (increment in host order,
 * store back in network order). */
2369 tmp = clib_net_to_host_u32 (data->ip.src_address.as_u32) + 1;
2370 data->ip.src_address.as_u32 = clib_net_to_host_u32 (tmp);
2377 vlib_cli_output (vm, "Must specify table index to delete sessions");
2381 vlib_cli_output (vm, "Try to delete %d sessions...", sessions);
/* Delete mode: find each entry by hash, recover the original match
 * key (entry key minus the skipped vectors), then delete it. */
2383 for (i = 0; i < sessions; i++)
2385 u8 * key_minus_skip;
2388 hash = vnet_classify_hash_packet (t, (u8 *) data);
2390 e = vnet_classify_find_entry (t, (u8 *) data, hash, 0 /* time_now */);
2391 /* Previous delete, perhaps... */
2394 ASSERT (e->opaque_index == (i+100));
2396 key_minus_skip = (u8 *)e->key;
2397 key_minus_skip -= t->skip_n_vectors * sizeof (u32x4);
2399 rv = vnet_classify_add_del_session (cm, t - cm->tables, key_minus_skip,
2400 IP_LOOKUP_NEXT_DROP,
2401 i+100 /* opaque_index */,
2402 0 /* advance */, 0, 0,
2405 clib_warning ("del: returned %d", rv);
2407 tmp = clib_net_to_host_u32 (data->ip.src_address.as_u32) + 1;
2408 data->ip.src_address.as_u32 = clib_net_to_host_u32 (tmp);
2412 vlib_cli_output (vm, "Deleted %d sessions...", deleted);
/* CLI registration for the TEST_CODE-only "test classify" command. */
2421 VLIB_CLI_COMMAND (test_classify_command, static) = {
2422 .path = "test classify",
2424 "test classify [src <ip>] [sessions <nn>] [buckets <nn>] [table <nn>] [del]",
2425 .function = test_classify_command_fn,
2427 #endif /* TEST_CODE */