2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
15 #include <vnet/classify/vnet_classify.h>
16 #include <vnet/classify/input_acl.h>
17 #include <vnet/ip/ip.h>
18 #include <vnet/api_errno.h> /* for API error numbers */
19 #include <vnet/l2/l2_classify.h> /* for L2_INPUT_CLASSIFY_NEXT_xxx */
/* Global classifier state: the pool of tables plus the vectors of
 * externally-registered unformat functions used below. */
21 vnet_classify_main_t vnet_classify_main;
/* Debug-only validation helpers, compiled in under VALIDATION_SCAFFOLDING;
 * no-op stubs are provided otherwise. */
23 #if VALIDATION_SCAFFOLDING
24 /* Validation scaffolding */
/* mv: exercises the table's private mheap (validation aid). */
25 void mv (vnet_classify_table_t * t)
29 oldheap = clib_mem_set_heap (t->mheap);
31 clib_mem_set_heap (oldheap);
/* rogue: walks every bucket / page / entry, recounts busy entries and
 * warns if the recount disagrees with t->active_elements. */
34 void rogue (vnet_classify_table_t * t)
37 vnet_classify_entry_t * v, * save_v;
38 u32 active_elements = 0;
39 vnet_classify_bucket_t * b;
41 for (i = 0; i < t->nbuckets; i++)
46 save_v = vnet_classify_get_entry (t, b->offset);
47 for (j = 0; j < (1<<b->log2_pages); j++)
49 for (k = 0; k < t->entries_per_page; k++)
51 v = vnet_classify_entry_at_index
52 (t, save_v, j*t->entries_per_page + k);
54 if (vnet_classify_entry_is_busy (v))
60 if (active_elements != t->active_elements)
61 clib_warning ("found %u expected %u elts", active_elements,
/* Stubs used when validation scaffolding is disabled. */
65 void mv (vnet_classify_table_t * t) { }
66 void rogue (vnet_classify_table_t * t) { }
/* Let other components register an unformat function for parsing
 * L2 classify next-index names (tried before the built-in names). */
69 void vnet_classify_register_unformat_l2_next_index_fn (unformat_function_t * fn)
71 vnet_classify_main_t * cm = &vnet_classify_main;
73 vec_add1 (cm->unformat_l2_next_index_fns, fn);
/* Register an unformat function for parsing IP classify next-index names. */
76 void vnet_classify_register_unformat_ip_next_index_fn (unformat_function_t * fn)
78 vnet_classify_main_t * cm = &vnet_classify_main;
80 vec_add1 (cm->unformat_ip_next_index_fns, fn);
/* Register an unformat function for parsing ACL next-index names. */
84 vnet_classify_register_unformat_acl_next_index_fn (unformat_function_t * fn)
86 vnet_classify_main_t * cm = &vnet_classify_main;
88 vec_add1 (cm->unformat_acl_next_index_fns, fn);
/* Register an unformat function for parsing policer next-index names. */
92 vnet_classify_register_unformat_policer_next_index_fn (unformat_function_t * fn)
94 vnet_classify_main_t * cm = &vnet_classify_main;
96 vec_add1 (cm->unformat_policer_next_index_fns, fn);
/* Register an unformat function for parsing opaque-index values. */
99 void vnet_classify_register_unformat_opaque_index_fn (unformat_function_t * fn)
101 vnet_classify_main_t * cm = &vnet_classify_main;
103 vec_add1 (cm->unformat_opaque_index_fns, fn);
/* Create a classify table: copies the match mask, rounds the bucket count
 * up to a power of two (hash masking below relies on this), and gives the
 * table its own mheap so entry pages and the writer lock live in
 * table-private memory. */
106 vnet_classify_table_t *
107 vnet_classify_new_table (vnet_classify_main_t *cm,
108 u8 * mask, u32 nbuckets, u32 memory_size,
112 vnet_classify_table_t * t;
115 nbuckets = 1 << (max_log2 (nbuckets));
117 pool_get_aligned (cm->tables, t, CLIB_CACHE_LINE_BYTES);
118 memset(t, 0, sizeof (*t));
/* Mask is stored as match_n_vectors u32x4 vectors, u32x4-aligned for the
 * vector compare in the match path. */
120 vec_validate_aligned (t->mask, match_n_vectors - 1, sizeof(u32x4));
121 clib_memcpy (t->mask, mask, match_n_vectors * sizeof (u32x4));
123 t->next_table_index = ~0;
124 t->nbuckets = nbuckets;
125 t->log2_nbuckets = max_log2 (nbuckets);
126 t->match_n_vectors = match_n_vectors;
127 t->skip_n_vectors = skip_n_vectors;
128 t->entries_per_page = 2;
/* Per-table heap: all entry pages are offsets into this mheap. */
130 t->mheap = mheap_alloc (0 /* use VM */, memory_size);
132 vec_validate_aligned (t->buckets, nbuckets - 1, CLIB_CACHE_LINE_BYTES);
133 oldheap = clib_mem_set_heap (t->mheap);
/* Writer lock lives on its own cache line to avoid false sharing. */
135 t->writer_lock = clib_mem_alloc_aligned (CLIB_CACHE_LINE_BYTES,
136 CLIB_CACHE_LINE_BYTES);
137 t->writer_lock[0] = 0;
139 clib_mem_set_heap (oldheap);
/* Delete the table at table_index, first recursively deleting any table
 * chained via next_table_index.  Repeated frees of the same index are
 * tolerated (pool_is_free_index check). */
143 void vnet_classify_delete_table_index (vnet_classify_main_t *cm,
146 vnet_classify_table_t * t;
148 /* Tolerate multiple frees, up to a point */
149 if (pool_is_free_index (cm->tables, table_index))
152 t = pool_elt_at_index (cm->tables, table_index);
153 if (t->next_table_index != ~0)
154 vnet_classify_delete_table_index (cm, t->next_table_index);
/* Freeing the table's private mheap releases all entry pages at once. */
157 vec_free (t->buckets);
158 mheap_free (t->mheap);
160 pool_put (cm->tables, t);
/* Allocate a page vector holding (1<<log2_pages) * entries_per_page
 * entries.  Reuses the per-log2-size freelist when one is available;
 * otherwise carves a correctly-sized vector out of the table's private
 * mheap.  Caller must hold the writer lock.  Entries are poisoned with
 * 0xff, which marks them free (VNET_CLASSIFY_ENTRY_FREE). */
163 static vnet_classify_entry_t *
164 vnet_classify_entry_alloc (vnet_classify_table_t * t, u32 log2_pages)
166 vnet_classify_entry_t * rv = 0;
168 vnet_classify_entry_##size##_t * rv##size = 0;
169 foreach_size_in_u32x4;
174 ASSERT (t->writer_lock[0]);
/* Freelist miss: allocate a fresh page vector on the table's mheap. */
175 if (log2_pages >= vec_len (t->freelists) || t->freelists [log2_pages] == 0)
177 oldheap = clib_mem_set_heap (t->mheap);
179 vec_validate (t->freelists, log2_pages);
181 switch(t->match_n_vectors)
183 /* Euchre the vector allocator into allocating the right sizes */
186 vec_validate_aligned \
187 (rv##size, ((1<<log2_pages)*t->entries_per_page) - 1, \
188 CLIB_CACHE_LINE_BYTES); \
189 rv = (vnet_classify_entry_t *) rv##size; \
191 foreach_size_in_u32x4;
198 clib_mem_set_heap (oldheap);
/* Freelist hit: pop the head of the matching size class. */
201 rv = t->freelists[log2_pages];
202 t->freelists[log2_pages] = rv->next_free;
206 ASSERT (vec_len(rv) == (1<<log2_pages)*t->entries_per_page);
208 switch (t->match_n_vectors)
213 memset (rv, 0xff, sizeof (*rv##size) * vec_len(rv)); \
215 foreach_size_in_u32x4;
/* Return a page vector to the freelist for its size class; the size class
 * is recovered from the vector length.  Caller must hold the writer lock. */
226 vnet_classify_entry_free (vnet_classify_table_t * t,
227 vnet_classify_entry_t * v)
231 ASSERT (t->writer_lock[0]);
233 free_list_index = min_log2(vec_len(v)/t->entries_per_page);
235 ASSERT(vec_len (t->freelists) > free_list_index);
/* Push onto the head of the freelist. */
237 v->next_free = t->freelists[free_list_index];
238 t->freelists[free_list_index] = v;
/* Copy bucket b's entry pages into this cpu's working copy, then point the
 * live bucket at the copy (with a memory barrier before the pointer swap)
 * so lock-free readers never observe a half-updated page while the writer
 * mutates it.  The pre-copy bucket header is saved in t->saved_bucket so
 * the caller can restore it. */
241 static inline void make_working_copy
242 (vnet_classify_table_t * t, vnet_classify_bucket_t * b)
244 vnet_classify_entry_t * v;
245 vnet_classify_bucket_t working_bucket __attribute__((aligned (8)));
247 vnet_classify_entry_t * working_copy;
249 vnet_classify_entry_##size##_t * working_copy##size = 0;
250 foreach_size_in_u32x4;
252 u32 cpu_number = os_get_cpu_number();
/* First use on this cpu: grow the per-cpu working-copy vector. */
254 if (cpu_number >= vec_len (t->working_copies))
256 oldheap = clib_mem_set_heap (t->mheap);
257 vec_validate (t->working_copies, cpu_number);
258 clib_mem_set_heap (oldheap);
262 * working_copies are per-cpu so that near-simultaneous
263 * updates from multiple threads will not result in sporadic, spurious
266 working_copy = t->working_copies[cpu_number];
268 t->saved_bucket.as_u64 = b->as_u64;
269 oldheap = clib_mem_set_heap (t->mheap);
/* Grow the working copy if this bucket has more entries than it holds. */
271 if ((1<<b->log2_pages)*t->entries_per_page > vec_len (working_copy))
273 switch(t->match_n_vectors)
275 /* Euchre the vector allocator into allocating the right sizes */
278 working_copy##size = (void *) working_copy; \
279 vec_validate_aligned \
280 (working_copy##size, \
281 ((1<<b->log2_pages)*t->entries_per_page) - 1, \
282 CLIB_CACHE_LINE_BYTES); \
283 working_copy = (void *) working_copy##size; \
285 foreach_size_in_u32x4;
291 t->working_copies[cpu_number] = working_copy;
294 _vec_len(working_copy) = (1<<b->log2_pages)*t->entries_per_page;
295 clib_mem_set_heap (oldheap);
297 v = vnet_classify_get_entry (t, b->offset);
/* Copy the live pages into the working copy, sized by match width. */
299 switch(t->match_n_vectors)
303 clib_memcpy (working_copy, v, \
304 sizeof (vnet_classify_entry_##size##_t) \
305 * (1<<b->log2_pages) \
306 * (t->entries_per_page)); \
308 foreach_size_in_u32x4 ;
/* Publish: barrier, then atomically swing the bucket to the copy. */
315 working_bucket.as_u64 = b->as_u64;
316 working_bucket.offset = vnet_classify_get_offset (t, working_copy);
317 CLIB_MEMORY_BARRIER();
318 b->as_u64 = working_bucket.as_u64;
319 t->working_copies[cpu_number] = working_copy;
/* Rehash all busy entries from old_values into a freshly allocated page
 * vector with new_log2_pages pages.  If two keys collide on the same full
 * slot group, the new pages are freed and the caller is told to retry
 * (presumably with one more doubling — TODO confirm, return path elided
 * in this listing). */
322 static vnet_classify_entry_t *
323 split_and_rehash (vnet_classify_table_t * t,
324 vnet_classify_entry_t * old_values,
327 vnet_classify_entry_t * new_values, * v, * new_v;
330 new_values = vnet_classify_entry_alloc (t, new_log2_pages);
332 for (i = 0; i < (vec_len (old_values)/t->entries_per_page); i++)
336 for (j = 0; j < t->entries_per_page; j++)
338 v = vnet_classify_entry_at_index
339 (t, old_values, i * t->entries_per_page + j);
341 if (vnet_classify_entry_is_busy (v))
343 /* Hack so we can use the packet hash routine */
345 key_minus_skip = (u8 *) v->key;
346 key_minus_skip -= t->skip_n_vectors * sizeof (u32x4);
348 new_hash = vnet_classify_hash_packet (t, key_minus_skip);
349 new_hash >>= t->log2_nbuckets;
350 new_hash &= (1<<new_log2_pages) - 1;
/* Linear probe within the slot group for a free entry. */
352 for (k = 0; k < t->entries_per_page; k++)
354 new_v = vnet_classify_entry_at_index (t, new_values,
357 if (vnet_classify_entry_is_free (new_v))
359 clib_memcpy (new_v, v, sizeof (vnet_classify_entry_t)
360 + (t->match_n_vectors * sizeof (u32x4)));
361 new_v->flags &= ~(VNET_CLASSIFY_ENTRY_FREE);
365 /* Crap. Tell caller to try again */
366 vnet_classify_entry_free (t, new_values);
/* Add or delete one session (key/value entry) in table t.  Writer path:
 * spins on the table writer lock, uses make_working_copy() so readers
 * remain lock-free, and on a full bucket splits it (doubling the page
 * count) via split_and_rehash().  The saved bucket header is restored
 * whenever the update completes against the working copy. */
376 int vnet_classify_add_del (vnet_classify_table_t * t,
377 vnet_classify_entry_t * add_v,
381 vnet_classify_bucket_t * b, tmp_b;
382 vnet_classify_entry_t * v, * new_v, * save_new_v, * working_copy, * save_v;
388 u32 cpu_number = os_get_cpu_number();
391 ASSERT ((add_v->flags & VNET_CLASSIFY_ENTRY_FREE) == 0);
/* Hash the key as the data plane would: back up over the skip vectors. */
393 key_minus_skip = (u8 *) add_v->key;
394 key_minus_skip -= t->skip_n_vectors * sizeof (u32x4);
396 hash = vnet_classify_hash_packet (t, key_minus_skip);
398 bucket_index = hash & (t->nbuckets-1);
399 b = &t->buckets[bucket_index];
401 hash >>= t->log2_nbuckets;
/* Spin until we own the writer lock. */
403 while (__sync_lock_test_and_set (t->writer_lock, 1))
406 /* First elt in the bucket? */
/* Empty bucket: allocate a single page and install the new entry. */
415 v = vnet_classify_entry_alloc (t, 0 /* new_log2_pages */);
416 clib_memcpy (v, add_v, sizeof (vnet_classify_entry_t) +
417 t->match_n_vectors * sizeof (u32x4));
418 v->flags &= ~(VNET_CLASSIFY_ENTRY_FREE);
421 tmp_b.offset = vnet_classify_get_offset (t, v);
423 b->as_u64 = tmp_b.as_u64;
424 t->active_elements ++;
/* Non-empty bucket: mutate a per-cpu working copy, not the live pages. */
429 make_working_copy (t, b);
431 save_v = vnet_classify_get_entry (t, t->saved_bucket.offset);
432 value_index = hash & ((1<<t->saved_bucket.log2_pages)-1);
437 * For obvious (in hindsight) reasons, see if we're supposed to
438 * replace an existing key, then look for an empty slot.
441 for (i = 0; i < t->entries_per_page; i++)
443 v = vnet_classify_entry_at_index (t, save_v, value_index + i);
445 if (!memcmp (v->key, add_v->key, t->match_n_vectors * sizeof (u32x4)))
447 clib_memcpy (v, add_v, sizeof (vnet_classify_entry_t) +
448 t->match_n_vectors * sizeof(u32x4));
449 v->flags &= ~(VNET_CLASSIFY_ENTRY_FREE);
451 CLIB_MEMORY_BARRIER();
452 /* Restore the previous (k,v) pairs */
453 b->as_u64 = t->saved_bucket.as_u64;
/* No existing key matched: take the first free slot in the group. */
457 for (i = 0; i < t->entries_per_page; i++)
459 v = vnet_classify_entry_at_index (t, save_v, value_index + i);
461 if (vnet_classify_entry_is_free (v))
463 clib_memcpy (v, add_v, sizeof (vnet_classify_entry_t) +
464 t->match_n_vectors * sizeof(u32x4));
465 v->flags &= ~(VNET_CLASSIFY_ENTRY_FREE);
466 CLIB_MEMORY_BARRIER();
467 b->as_u64 = t->saved_bucket.as_u64;
468 t->active_elements ++;
472 /* no room at the inn... split case... */
/* Delete path: poison the matching entry back to the free pattern. */
476 for (i = 0; i < t->entries_per_page; i++)
478 v = vnet_classify_entry_at_index (t, save_v, value_index + i);
480 if (!memcmp (v->key, add_v->key, t->match_n_vectors * sizeof (u32x4)))
482 memset (v, 0xff, sizeof (vnet_classify_entry_t) +
483 t->match_n_vectors * sizeof(u32x4));
484 v->flags |= VNET_CLASSIFY_ENTRY_FREE;
485 CLIB_MEMORY_BARRIER();
486 b->as_u64 = t->saved_bucket.as_u64;
487 t->active_elements --;
492 b->as_u64 = t->saved_bucket.as_u64;
/* Split: double the page count and rehash the working copy. */
496 new_log2_pages = t->saved_bucket.log2_pages + 1;
499 working_copy = t->working_copies[cpu_number];
500 new_v = split_and_rehash (t, working_copy, new_log2_pages);
508 /* Try to add the new entry */
511 key_minus_skip = (u8 *) add_v->key;
512 key_minus_skip -= t->skip_n_vectors * sizeof (u32x4);
514 new_hash = vnet_classify_hash_packet_inline (t, key_minus_skip);
515 new_hash >>= t->log2_nbuckets;
516 new_hash &= (1<<min_log2((vec_len(new_v)/t->entries_per_page))) - 1;
518 for (i = 0; i < t->entries_per_page; i++)
520 new_v = vnet_classify_entry_at_index (t, save_new_v, new_hash + i);
522 if (vnet_classify_entry_is_free (new_v))
524 clib_memcpy (new_v, add_v, sizeof (vnet_classify_entry_t) +
525 t->match_n_vectors * sizeof(u32x4));
526 new_v->flags &= ~(VNET_CLASSIFY_ENTRY_FREE);
530 /* Crap. Try again */
532 vnet_classify_entry_free (t, save_new_v);
/* Success: publish the split bucket, then free the old pages. */
536 tmp_b.log2_pages = min_log2 (vec_len (save_new_v)/t->entries_per_page);
537 tmp_b.offset = vnet_classify_get_offset (t, save_new_v);
538 CLIB_MEMORY_BARRIER();
539 b->as_u64 = tmp_b.as_u64;
540 t->active_elements ++;
541 v = vnet_classify_get_entry (t, t->saved_bucket.offset);
542 vnet_classify_entry_free (t, v);
/* Release the writer lock (barrier first so updates are visible). */
545 CLIB_MEMORY_BARRIER();
546 t->writer_lock[0] = 0;
/* Packed overlay used when building classifier data/mask buffers that
 * begin with an ethernet header. */
551 typedef CLIB_PACKED(struct {
552 ethernet_header_t eh;
554 }) classify_data_or_mask_t;
/* Out-of-line wrapper so non-inline callers can hash packet data h
 * against table t's (skip, match) mask. */
556 u64 vnet_classify_hash_packet (vnet_classify_table_t * t, u8 * h)
558 return vnet_classify_hash_packet_inline (t, h);
/* Out-of-line wrapper over the inline lookup: find the entry matching
 * packet data h with precomputed hash, updating hit stats at time now. */
561 vnet_classify_entry_t *
562 vnet_classify_find_entry (vnet_classify_table_t * t,
563 u8 * h, u64 hash, f64 now)
565 return vnet_classify_find_entry_inline (t, h, hash, now);
/* format() helper: render one entry — heap offset, next_index, advance,
 * opaque, the key in hex, and hit statistics when the entry is busy. */
568 static u8 * format_classify_entry (u8 * s, va_list * args)
570 vnet_classify_table_t * t = va_arg (*args, vnet_classify_table_t *);
571 vnet_classify_entry_t * e = va_arg (*args, vnet_classify_entry_t *);
574 (s, "[%u]: next_index %d advance %d opaque %d\n",
575 vnet_classify_get_offset (t, e), e->next_index, e->advance,
579 s = format (s, " k: %U\n", format_hex_bytes, e->key,
580 t->match_n_vectors * sizeof(u32x4));
582 if (vnet_classify_entry_is_busy (e))
583 s = format (s, " hits %lld, last_heard %.2f\n",
584 e->hits, e->last_heard);
586 s = format (s, " entry is free\n");
/* format() helper: walk every bucket / page / entry of table t and
 * render its contents; verbose controls per-entry detail. */
590 u8 * format_classify_table (u8 * s, va_list * args)
592 vnet_classify_table_t * t = va_arg (*args, vnet_classify_table_t *);
593 int verbose = va_arg (*args, int);
594 vnet_classify_bucket_t * b;
595 vnet_classify_entry_t * v, * save_v;
597 u64 active_elements = 0;
599 for (i = 0; i < t->nbuckets; i++)
605 s = format (s, "[%d]: empty\n", i);
611 s = format (s, "[%d]: heap offset %d, len %d\n", i,
612 b->offset, (1<<b->log2_pages));
615 save_v = vnet_classify_get_entry (t, b->offset);
616 for (j = 0; j < (1<<b->log2_pages); j++)
618 for (k = 0; k < t->entries_per_page; k++)
621 v = vnet_classify_entry_at_index (t, save_v,
622 j*t->entries_per_page + k);
624 if (vnet_classify_entry_is_free (v))
627 s = format (s, " %d: empty\n",
628 j * t->entries_per_page + k);
633 s = format (s, " %d: %U\n",
634 j * t->entries_per_page + k,
635 format_classify_entry, t, v);
642 s = format (s, " %lld active elements\n", active_elements);
643 s = format (s, " %d free lists\n", vec_len (t->freelists));
/* API-level add/delete of a classify table.  On add: validates arguments,
 * creates the table and returns its pool index via *table_index.  On
 * delete: tears down the table at *table_index.  Returns 0 or a
 * VNET_API_ERROR_* code. */
647 int vnet_classify_add_del_table (vnet_classify_main_t * cm,
653 u32 next_table_index,
658 vnet_classify_table_t * t;
663 if (memory_size == 0)
664 return VNET_API_ERROR_INVALID_MEMORY_SIZE;
667 return VNET_API_ERROR_INVALID_VALUE;
669 t = vnet_classify_new_table (cm, mask, nbuckets, memory_size,
671 t->next_table_index = next_table_index;
672 t->miss_next_index = miss_next_index;
673 *table_index = t - cm->tables;
677 vnet_classify_delete_table_index (cm, *table_index);
/* Field list macro for the ip4 mask parser below (continuation lines
 * elided in this listing). */
681 #define foreach_ip4_proto_field \
/* Parse an ip4 mask spec ("src", "dst", "proto", per-field names, ...)
 * into a byte mask vector returned via *maskp: each selected field is
 * set to all-ones in an ip4_header_t-shaped mask. */
691 uword unformat_ip4_mask (unformat_input_t * input, va_list * args)
693 u8 ** maskp = va_arg (*args, u8 **);
695 u8 found_something = 0;
699 foreach_ip4_proto_field;
705 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
707 if (unformat (input, "version"))
709 else if (unformat (input, "hdr_length"))
711 else if (unformat (input, "src"))
713 else if (unformat (input, "dst"))
715 else if (unformat (input, "proto"))
718 #define _(a) else if (unformat (input, #a)) a=1;
719 foreach_ip4_proto_field
725 #define _(a) found_something += a;
726 foreach_ip4_proto_field;
729 if (found_something == 0)
732 vec_validate (mask, sizeof (*ip) - 1);
734 ip = (ip4_header_t *) mask;
736 #define _(a) if (a) memset (&ip->a, 0xff, sizeof (ip->a));
737 foreach_ip4_proto_field;
740 ip->ip_version_and_header_length = 0;
743 ip->ip_version_and_header_length |= 0xF0;
746 ip->ip_version_and_header_length |= 0x0F;
/* Field list macro for the ip6 mask parser below (continuation lines
 * elided in this listing). */
752 #define foreach_ip6_proto_field \
/* Parse an ip6 mask spec into a byte mask vector returned via *maskp;
 * version / traffic-class / flow-label bits are packed into the combined
 * first word in network byte order. */
759 uword unformat_ip6_mask (unformat_input_t * input, va_list * args)
761 u8 ** maskp = va_arg (*args, u8 **);
763 u8 found_something = 0;
765 u32 ip_version_traffic_class_and_flow_label;
768 foreach_ip6_proto_field;
771 u8 traffic_class = 0;
774 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
776 if (unformat (input, "version"))
778 else if (unformat (input, "traffic-class"))
780 else if (unformat (input, "flow-label"))
782 else if (unformat (input, "src"))
784 else if (unformat (input, "dst"))
786 else if (unformat (input, "proto"))
789 #define _(a) else if (unformat (input, #a)) a=1;
790 foreach_ip6_proto_field
796 #define _(a) found_something += a;
797 foreach_ip6_proto_field;
800 if (found_something == 0)
803 vec_validate (mask, sizeof (*ip) - 1);
805 ip = (ip6_header_t *) mask;
807 #define _(a) if (a) memset (&ip->a, 0xff, sizeof (ip->a));
808 foreach_ip6_proto_field;
811 ip_version_traffic_class_and_flow_label = 0;
814 ip_version_traffic_class_and_flow_label |= 0xF0000000;
817 ip_version_traffic_class_and_flow_label |= 0x0FF00000;
820 ip_version_traffic_class_and_flow_label |= 0x000FFFFF;
822 ip->ip_version_traffic_class_and_flow_label =
823 clib_host_to_net_u32 (ip_version_traffic_class_and_flow_label);
/* Parse an L3 mask: dispatch to the ip4 or ip6 mask parser. */
829 uword unformat_l3_mask (unformat_input_t * input, va_list * args)
831 u8 ** maskp = va_arg (*args, u8 **);
833 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
834 if (unformat (input, "ip4 %U", unformat_ip4_mask, maskp))
836 else if (unformat (input, "ip6 %U", unformat_ip6_mask, maskp))
/* Parse an L2 mask spec (src/dst MAC, ethertype, vlan tags, cos bits,
 * dot1q/dot1ad) into a byte mask vector via *maskp.  Byte offsets below
 * depend on how many vlan tags are selected (0, 1 or 2). */
844 uword unformat_l2_mask (unformat_input_t * input, va_list * args)
846 u8 ** maskp = va_arg (*args, u8 **);
861 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
862 if (unformat (input, "src"))
864 else if (unformat (input, "dst"))
866 else if (unformat (input, "proto"))
868 else if (unformat (input, "tag1"))
870 else if (unformat (input, "tag2"))
872 else if (unformat (input, "ignore-tag1"))
874 else if (unformat (input, "ignore-tag2"))
876 else if (unformat (input, "cos1"))
878 else if (unformat (input, "cos2"))
880 else if (unformat (input, "dot1q"))
882 else if (unformat (input, "dot1ad"))
887 if ((src + dst + proto + tag1 + tag2 + dot1q + dot1ad +
888 ignore_tag1 + ignore_tag2 + cos1 + cos2) == 0)
/* Header length grows with the number of vlan tags present. */
891 if (tag1 || ignore_tag1 || cos1 || dot1q)
893 if (tag2 || ignore_tag2 || cos2 || dot1ad)
896 vec_validate (mask, len-1)
899 memset (mask, 0xff, 6);
902 memset (mask + 6, 0xff, 6);
/* Ethertype offset: 20 with two tags, 16 with one, 12 with none. */
915 mask[21] = mask [20] = 0xff;
936 mask[16] = mask [17] = 0xff;
945 mask[12] = mask [13] = 0xff;
/* Parse a full classify mask (hex | l2 | l3 [with an implied ethernet
 * header]), then compute the (skip, match) u32x4-vector parameters:
 * leading all-zero vectors are deleted and counted as skip, trailing
 * all-zero vectors are trimmed so match covers only significant data. */
951 uword unformat_classify_mask (unformat_input_t * input, va_list * args)
953 vnet_classify_main_t * CLIB_UNUSED(cm)
954 = va_arg (*args, vnet_classify_main_t *);
955 u8 ** maskp = va_arg (*args, u8 **);
956 u32 * skipp = va_arg (*args, u32 *);
957 u32 * matchp = va_arg (*args, u32 *);
964 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
965 if (unformat (input, "hex %U", unformat_hex_string, &mask))
967 else if (unformat (input, "l2 %U", unformat_l2_mask, &l2))
969 else if (unformat (input, "l3 %U", unformat_l3_mask, &l3))
975 if (mask || l2 || l3)
979 /* "With a free Ethernet header in every package" */
981 vec_validate (l2, 13);
985 vec_append (mask, l3);
990 /* Scan forward looking for the first significant mask octet */
991 for (i = 0; i < vec_len (mask); i++)
995 /* compute (skip, match) params */
996 *skipp = i / sizeof(u32x4);
997 vec_delete (mask, *skipp * sizeof(u32x4), 0);
999 /* Pad mask to an even multiple of the vector size */
1000 while (vec_len (mask) % sizeof (u32x4))
1003 match = vec_len (mask) / sizeof (u32x4);
/* Trim all-zero trailing vectors (checked 64 bits at a time). */
1005 for (i = match*sizeof(u32x4); i > 0; i-= sizeof(u32x4))
1007 u64 *tmp = (u64 *)(mask + (i-sizeof(u32x4)));
1008 if (*tmp || *(tmp+1))
1013 clib_warning ("BUG: match 0");
1015 _vec_len (mask) = match * sizeof(u32x4);
/* Name -> L2_INPUT_CLASSIFY_NEXT_* table for the parser below
 * (continuation lines elided in this listing). */
1026 #define foreach_l2_input_next \
1028 _(ethernet, ETHERNET_INPUT) \
/* Parse an L2-input classify next index: registered unformat fns first
 * (allowing override), then built-in names, then a raw number. */
1033 uword unformat_l2_input_next_index (unformat_input_t * input, va_list * args)
1035 vnet_classify_main_t * cm = &vnet_classify_main;
1036 u32 * miss_next_indexp = va_arg (*args, u32 *);
1041 /* First try registered unformat fns, allowing override... */
1042 for (i = 0; i < vec_len (cm->unformat_l2_next_index_fns); i++)
1044 if (unformat (input, "%U", cm->unformat_l2_next_index_fns[i], &tmp))
1052 if (unformat (input, #n)) { next_index = L2_INPUT_CLASSIFY_NEXT_##N; goto out;}
1053 foreach_l2_input_next;
1056 if (unformat (input, "%d", &tmp))
1065 *miss_next_indexp = next_index;
/* Name -> L2_OUTPUT_CLASSIFY_NEXT_* table for the parser below
 * (continuation lines elided in this listing). */
1069 #define foreach_l2_output_next \
/* Parse an L2-output classify next index: registered unformat fns first,
 * then built-in names, then a raw number. */
1072 uword unformat_l2_output_next_index (unformat_input_t * input, va_list * args)
1074 vnet_classify_main_t * cm = &vnet_classify_main;
1075 u32 * miss_next_indexp = va_arg (*args, u32 *);
1080 /* First try registered unformat fns, allowing override... */
1081 for (i = 0; i < vec_len (cm->unformat_l2_next_index_fns); i++)
1083 if (unformat (input, "%U", cm->unformat_l2_next_index_fns[i], &tmp))
1091 if (unformat (input, #n)) { next_index = L2_OUTPUT_CLASSIFY_NEXT_##N; goto out;}
1092 foreach_l2_output_next;
1095 if (unformat (input, "%d", &tmp))
1104 *miss_next_indexp = next_index;
/* Name -> IP_LOOKUP_NEXT_* table for the parser below (continuation
 * lines elided in this listing). */
1108 #define foreach_ip_next \
/* Parse an IP classify next index: registered unformat fns first, then
 * built-in names, then a raw number. */
1112 uword unformat_ip_next_index (unformat_input_t * input, va_list * args)
1114 u32 * miss_next_indexp = va_arg (*args, u32 *);
1115 vnet_classify_main_t * cm = &vnet_classify_main;
1120 /* First try registered unformat fns, allowing override... */
1121 for (i = 0; i < vec_len (cm->unformat_ip_next_index_fns); i++)
1123 if (unformat (input, "%U", cm->unformat_ip_next_index_fns[i], &tmp))
1131 if (unformat (input, #n)) { next_index = IP_LOOKUP_NEXT_##N; goto out;}
1135 if (unformat (input, "%d", &tmp))
1144 *miss_next_indexp = next_index;
/* Name -> ACL_NEXT_INDEX_* table for the parser below (continuation
 * lines elided in this listing). */
1148 #define foreach_acl_next \
/* Parse an ACL next index: registered unformat fns first, then built-in
 * names, then "permit", then a raw number. */
1151 uword unformat_acl_next_index (unformat_input_t * input, va_list * args)
1153 u32 * next_indexp = va_arg (*args, u32 *);
1154 vnet_classify_main_t * cm = &vnet_classify_main;
1159 /* First try registered unformat fns, allowing override... */
1160 for (i = 0; i < vec_len (cm->unformat_acl_next_index_fns); i++)
1162 if (unformat (input, "%U", cm->unformat_acl_next_index_fns[i], &tmp))
1170 if (unformat (input, #n)) { next_index = ACL_NEXT_INDEX_##N; goto out;}
1174 if (unformat (input, "permit"))
1179 else if (unformat (input, "%d", &tmp))
1188 *next_indexp = next_index;
/* Parse a policer next index: registered unformat fns first, then a raw
 * number. */
1192 uword unformat_policer_next_index (unformat_input_t * input, va_list * args)
1194 u32 * next_indexp = va_arg (*args, u32 *);
1195 vnet_classify_main_t * cm = &vnet_classify_main;
1200 /* First try registered unformat fns, allowing override... */
1201 for (i = 0; i < vec_len (cm->unformat_policer_next_index_fns); i++)
1203 if (unformat (input, "%U", cm->unformat_policer_next_index_fns[i], &tmp))
1210 if (unformat (input, "%d", &tmp))
1219 *next_indexp = next_index;
/* CLI handler for "classify table": parses table parameters (mask, skip,
 * match, buckets, memory size, chained next-table, miss-next indices),
 * validates required arguments, then calls vnet_classify_add_del_table. */
1223 static clib_error_t *
1224 classify_table_command_fn (vlib_main_t * vm,
1225 unformat_input_t * input,
1226 vlib_cli_command_t * cmd)
1232 u32 table_index = ~0;
1233 u32 next_table_index = ~0;
1234 u32 miss_next_index = ~0;
/* Default table heap: 2 MB. */
1235 u32 memory_size = 2<<20;
1239 vnet_classify_main_t * cm = &vnet_classify_main;
1242 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
1243 if (unformat (input, "del"))
1245 else if (unformat (input, "buckets %d", &nbuckets))
1247 else if (unformat (input, "skip %d", &skip))
1249 else if (unformat (input, "match %d", &match))
1251 else if (unformat (input, "table %d", &table_index))
1253 else if (unformat (input, "mask %U", unformat_classify_mask,
1254 cm, &mask, &skip, &match))
1256 else if (unformat (input, "memory-size %uM", &tmp))
1257 memory_size = tmp<<20;
1258 else if (unformat (input, "memory-size %uG", &tmp))
1259 memory_size = tmp<<30;
1260 else if (unformat (input, "next-table %d", &next_table_index))
1262 else if (unformat (input, "miss-next %U", unformat_ip_next_index,
1265 else if (unformat (input, "l2-input-miss-next %U", unformat_l2_input_next_index,
1268 else if (unformat (input, "l2-output-miss-next %U", unformat_l2_output_next_index,
1271 else if (unformat (input, "acl-miss-next %U", unformat_acl_next_index,
/* Argument validation: adds need mask/skip/match, deletes need a table. */
1279 if (is_add && mask == 0)
1280 return clib_error_return (0, "Mask required");
1282 if (is_add && skip == ~0)
1283 return clib_error_return (0, "skip count required");
1285 if (is_add && match == ~0)
1286 return clib_error_return (0, "match count required");
1288 if (!is_add && table_index == ~0)
1289 return clib_error_return (0, "table index required for delete");
1291 rv = vnet_classify_add_del_table (cm, mask, nbuckets, memory_size,
1292 skip, match, next_table_index, miss_next_index,
1293 &table_index, is_add);
1300 return clib_error_return (0, "vnet_classify_add_del_table returned %d",
/* CLI registration for "classify table". */
1306 VLIB_CLI_COMMAND (classify_table, static) = {
1307 .path = "classify table",
1309 "classify table [miss-next|l2-miss_next|acl-miss-next <next_index>]"
1310 "\n mask <mask-value> buckets <nn> [skip <n>] [match <n>] [del]",
1311 .function = classify_table_command_fn,
/* format() helper for one classify table: with index == ~0 prints the
 * column header row, otherwise prints the table's summary line and, when
 * verbose, its heap usage, mask and full contents. */
1314 static u8 * format_vnet_classify_table (u8 * s, va_list * args)
1316 vnet_classify_main_t * cm = va_arg (*args, vnet_classify_main_t *);
1317 int verbose = va_arg (*args, int);
1318 u32 index = va_arg (*args, u32);
1319 vnet_classify_table_t * t;
1323 s = format (s, "%10s%10s%10s%10s", "TableIdx", "Sessions", "NextTbl",
1324 "NextNode", verbose ? "Details" : "");
1328 t = pool_elt_at_index (cm->tables, index);
1329 s = format (s, "%10u%10d%10d%10d", index, t->active_elements,
1330 t->next_table_index, t->miss_next_index);
1332 s = format (s, "\n Heap: %U", format_mheap, t->mheap, 0 /*verbose*/);
1334 s = format (s, "\n nbuckets %d, skip %d match %d",
1335 t->nbuckets, t->skip_n_vectors, t->match_n_vectors);
1336 s = format (s, "\n mask %U", format_hex_bytes, t->mask,
1337 t->match_n_vectors * sizeof (u32x4));
1342 s = format (s, "\n%U", format_classify_table, t, verbose);
/* CLI handler for "show classify tables": collects matching table
 * indices (all, or just "index <n>") and prints them via
 * format_vnet_classify_table, header row first. */
1347 static clib_error_t *
1348 show_classify_tables_command_fn (vlib_main_t * vm,
1349 unformat_input_t * input,
1350 vlib_cli_command_t * cmd)
1352 vnet_classify_main_t * cm = &vnet_classify_main;
1353 vnet_classify_table_t * t;
1354 u32 match_index = ~0;
1359 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
1361 if (unformat (input, "index %d", &match_index))
1363 else if (unformat (input, "verbose %d", &verbose))
1365 else if (unformat (input, "verbose"))
1371 pool_foreach (t, cm->tables,
1373 if (match_index == ~0 || (match_index == t - cm->tables))
1374 vec_add1 (indices, t - cm->tables);
1377 if (vec_len(indices))
/* First call with ~0 prints the header row (see formatter above). */
1379 vlib_cli_output (vm, "%U", format_vnet_classify_table, cm, verbose,
1381 for (i = 0; i < vec_len (indices); i++)
1382 vlib_cli_output (vm, "%U", format_vnet_classify_table, cm,
1383 verbose, indices[i]);
1386 vlib_cli_output (vm, "No classifier tables configured");
/* CLI registration for "show classify tables". */
1393 VLIB_CLI_COMMAND (show_classify_table_command, static) = {
1394 .path = "show classify tables",
1395 .short_help = "show classify tables [index <nn>]",
1396 .function = show_classify_tables_command_fn,
/* Parse an ip4 match spec ("src a.b.c.d", "proto n", ...) into a match
 * byte vector via *matchp, laid out as an ip4_header_t.  The vector is
 * u32x4-aligned because the real comparison functions operate on it. */
1399 uword unformat_ip4_match (unformat_input_t * input, va_list * args)
1401 u8 ** matchp = va_arg (*args, u8 **);
1408 int src = 0, dst = 0;
1409 ip4_address_t src_val, dst_val;
1416 int fragment_id = 0;
1417 u32 fragment_id_val;
1423 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
1425 if (unformat (input, "version %d", &version_val))
1427 else if (unformat (input, "hdr_length %d", &hdr_length_val))
1429 else if (unformat (input, "src %U", unformat_ip4_address, &src_val))
1431 else if (unformat (input, "dst %U", unformat_ip4_address, &dst_val))
1433 else if (unformat (input, "proto %d", &proto_val))
1435 else if (unformat (input, "tos %d", &tos_val))
1437 else if (unformat (input, "length %d", &length_val))
1439 else if (unformat (input, "fragment_id %d", &fragment_id_val))
1441 else if (unformat (input, "ttl %d", &ttl_val))
1443 else if (unformat (input, "checksum %d", &checksum_val))
1449 if (version + hdr_length + src + dst + proto + tos + length + fragment_id
1450 + ttl + checksum == 0)
1454 * Aligned because we use the real comparison functions
1456 vec_validate_aligned (match, sizeof (*ip) - 1, sizeof(u32x4));
1458 ip = (ip4_header_t *) match;
1460 /* These are realistically matched in practice */
1462 ip->src_address.as_u32 = src_val.as_u32;
1465 ip->dst_address.as_u32 = dst_val.as_u32;
1468 ip->protocol = proto_val;
1471 /* These are not, but they're included for completeness */
1473 ip->ip_version_and_header_length |= (version_val & 0xF)<<4;
1476 ip->ip_version_and_header_length |= (hdr_length_val & 0xF);
1482 ip->length = length_val;
1488 ip->checksum = checksum_val;
/* Parse an ip6 match spec into a match byte vector via *matchp, laid out
 * as an ip6_header_t; version / traffic_class / flow_label are packed
 * into the combined first word in network byte order. */
1494 uword unformat_ip6_match (unformat_input_t * input, va_list * args)
1496 u8 ** matchp = va_arg (*args, u8 **);
1501 u8 traffic_class = 0;
1502 u32 traffic_class_val;
1505 int src = 0, dst = 0;
1506 ip6_address_t src_val, dst_val;
1509 int payload_length = 0;
1510 u32 payload_length_val;
1513 u32 ip_version_traffic_class_and_flow_label;
1515 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
1517 if (unformat (input, "version %d", &version_val))
1519 else if (unformat (input, "traffic_class %d", &traffic_class_val))
1521 else if (unformat (input, "flow_label %d", &flow_label_val))
1523 else if (unformat (input, "src %U", unformat_ip6_address, &src_val))
1525 else if (unformat (input, "dst %U", unformat_ip6_address, &dst_val))
1527 else if (unformat (input, "proto %d", &proto_val))
1529 else if (unformat (input, "payload_length %d", &payload_length_val))
1531 else if (unformat (input, "hop_limit %d", &hop_limit_val))
1537 if (version + traffic_class + flow_label + src + dst + proto +
1538 payload_length + hop_limit == 0)
1542 * Aligned because we use the real comparison functions
1544 vec_validate_aligned (match, sizeof (*ip) - 1, sizeof(u32x4));
1546 ip = (ip6_header_t *) match;
1549 clib_memcpy (&ip->src_address, &src_val, sizeof (ip->src_address));
1552 clib_memcpy (&ip->dst_address, &dst_val, sizeof (ip->dst_address));
1555 ip->protocol = proto_val;
1557 ip_version_traffic_class_and_flow_label = 0;
1560 ip_version_traffic_class_and_flow_label |= (version_val & 0xF) << 28;
1563 ip_version_traffic_class_and_flow_label |= (traffic_class_val & 0xFF) << 20;
1566 ip_version_traffic_class_and_flow_label |= (flow_label_val & 0xFFFFF);
1568 ip->ip_version_traffic_class_and_flow_label =
1569 clib_host_to_net_u32 (ip_version_traffic_class_and_flow_label);
1572 ip->payload_length = clib_host_to_net_u16 (payload_length_val);
1575 ip->hop_limit = hop_limit_val;
/* Parse an L3 match: dispatch to the ip4 or ip6 match parser. */
1581 uword unformat_l3_match (unformat_input_t * input, va_list * args)
1583 u8 ** matchp = va_arg (*args, u8 **);
1585 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
1586 if (unformat (input, "ip4 %U", unformat_ip4_match, matchp))
1588 else if (unformat (input, "ip6 %U", unformat_ip6_match, matchp))
/* Parse a numeric vlan tag into two big-endian bytes at tagp; the top
 * nibble of the high byte is masked off (vlan IDs are 12 bits). */
1597 uword unformat_vlan_tag (unformat_input_t * input, va_list * args)
1599 u8 * tagp = va_arg (*args, u8 *);
1602 if (unformat(input, "%d", &tag))
1604 tagp[0] = (tag>>8) & 0x0F;
1605 tagp[1] = tag & 0xFF;
/* Parse an L2 match spec (src/dst MAC, ethertype, vlan tags, cos bits)
 * into a match byte vector via *matchp.  As in unformat_l2_mask, byte
 * offsets depend on how many vlan tags are present (0, 1 or 2). */
1612 uword unformat_l2_match (unformat_input_t * input, va_list * args)
1614 u8 ** matchp = va_arg (*args, u8 **);
1634 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
1635 if (unformat (input, "src %U", unformat_ethernet_address, &src_val))
1637 else if (unformat (input, "dst %U", unformat_ethernet_address, &dst_val))
1639 else if (unformat (input, "proto %U",
1640 unformat_ethernet_type_host_byte_order, &proto_val))
1642 else if (unformat (input, "tag1 %U", unformat_vlan_tag, tag1_val))
1644 else if (unformat (input, "tag2 %U", unformat_vlan_tag, tag2_val))
1646 else if (unformat (input, "ignore-tag1"))
1648 else if (unformat (input, "ignore-tag2"))
1650 else if (unformat (input, "cos1 %d", &cos1_val))
1652 else if (unformat (input, "cos2 %d", &cos2_val))
1657 if ((src + dst + proto + tag1 + tag2 +
1658 ignore_tag1 + ignore_tag2 + cos1 + cos2) == 0)
/* Header length grows with the number of vlan tags present. */
1661 if (tag1 || ignore_tag1 || cos1)
1663 if (tag2 || ignore_tag2 || cos2)
1666 vec_validate_aligned (match, len-1, sizeof(u32x4));
1669 clib_memcpy (match, dst_val, 6);
1672 clib_memcpy (match + 6, src_val, 6);
1676 /* inner vlan tag */
1677 match[19] = tag2_val[1];
1678 match[18] = tag2_val[0];
1680 match [18] |= (cos2_val & 0x7) << 5;
/* Two-tag case: ethertype at bytes 20-21. */
1683 match[21] = proto_val & 0xff;
1684 match[20] = proto_val >> 8;
1688 match [15] = tag1_val[1];
1689 match [14] = tag1_val[0];
1692 match [14] |= (cos1_val & 0x7) << 5;
/* One-tag case: tag at 14-15, ethertype at 16-17. */
1698 match [15] = tag1_val[1];
1699 match [14] = tag1_val[0];
1702 match[17] = proto_val & 0xff;
1703 match[16] = proto_val >> 8;
1706 match [14] |= (cos1_val & 0x7) << 5;
1712 match [18] |= (cos2_val & 0x7) << 5;
1714 match [14] |= (cos1_val & 0x7) << 5;
/* Untagged case: ethertype at bytes 12-13. */
1717 match[13] = proto_val & 0xff;
1718 match[12] = proto_val >> 8;
/*
 * unformat_classify_match
 * Parse a complete classifier match spec ("hex ...", "l2 ...",
 * "l3 ...") for the table identified by table_index, and produce a
 * match vector in *matchp sized exactly to the table's
 * (skip + match) * sizeof(u32x4) key geometry.
 * va_args: vnet_classify_main_t *cm, u8 **matchp, u32 table_index.
 * NOTE(review): elided listing — the error return for a free table
 * index, the vec_append of l2, the final *matchp assignment and the
 * return value are not visible here.
 */
1726 uword unformat_classify_match (unformat_input_t * input, va_list * args)
1728 vnet_classify_main_t * cm = va_arg (*args, vnet_classify_main_t *);
1729 u8 ** matchp = va_arg (*args, u8 **);
1730 u32 table_index = va_arg (*args, u32);
1731 vnet_classify_table_t * t;
/* The table must exist: its geometry determines the final vector size. */
1737 if (pool_is_free_index (cm->tables, table_index))
1740 t = pool_elt_at_index (cm->tables, table_index);
1742 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
1743 if (unformat (input, "hex %U", unformat_hex_string, &match))
1745 else if (unformat (input, "l2 %U", unformat_l2_match, &l2))
1747 else if (unformat (input, "l3 %U", unformat_l3_match, &l3))
/* At least one of the three forms must have parsed successfully. */
1753 if (match || l2 || l3)
1757 /* "Win a free Ethernet header in every packet" */
/* Pad an absent L2 part to a full 14-byte Ethernet header so the L3
 * bytes land at their on-wire offsets. */
1759 vec_validate_aligned (l2, 13, sizeof(u32x4));
1763 vec_append_aligned (match, l3, sizeof(u32x4));
1768 /* Make sure the vector is big enough even if key is all 0's */
1769 vec_validate_aligned
1770 (match, ((t->match_n_vectors + t->skip_n_vectors) * sizeof(u32x4)) - 1,
1773 /* Set size, include skipped vectors*/
1774 _vec_len (match) = (t->match_n_vectors+t->skip_n_vectors) * sizeof(u32x4);
/*
 * vnet_classify_add_del_session
 * Add or delete a single session (entry) in the classify table at
 * table_index. Builds a stack-resident scratch entry from the raw
 * match bytes, then hands it to vnet_classify_add_del.
 * Returns 0 on success, or a VNET_API_ERROR_* code
 * (NO_SUCH_TABLE if table_index is free; NO_SUCH_ENTRY on add/del
 * failure — elided `if (rv)` guard presumed above the final return).
 * NOTE(review): elided listing — remaining parameters of the
 * signature (match, hit_next_index, opaque_index, advance, is_add)
 * and several statements are not visible here.
 */
1784 int vnet_classify_add_del_session (vnet_classify_main_t * cm,
1792 vnet_classify_table_t * t;
/* Largest supported entry (5 u32x4 key vectors), 16-byte aligned so
 * the vector key copy below is aligned. */
1793 vnet_classify_entry_5_t _max_e __attribute__((aligned (16)));
1794 vnet_classify_entry_t * e;
1797 if (pool_is_free_index (cm->tables, table_index))
1798 return VNET_API_ERROR_NO_SUCH_TABLE;
1800 t = pool_elt_at_index (cm->tables, table_index);
1802 e = (vnet_classify_entry_t *)&_max_e;
1803 e->next_index = hit_next_index;
1804 e->opaque_index = opaque_index;
1805 e->advance = advance;
1810 /* Copy key data, honoring skip_n_vectors */
1811 clib_memcpy (&e->key, match + t->skip_n_vectors * sizeof (u32x4),
1812 t->match_n_vectors * sizeof (u32x4));
1814 /* Clear don't-care bits; likely when dynamically creating sessions */
1815 for (i = 0; i < t->match_n_vectors; i++)
1816 e->key[i] &= t->mask[i];
1818 rv = vnet_classify_add_del (t, e, is_add);
1820 return VNET_API_ERROR_NO_SUCH_ENTRY;
/*
 * classify_session_command_fn
 * CLI handler for "classify session". Parses the hit-next index (in
 * one of several namespaces: ip, l2-input, l2-output, acl, policer),
 * an optional opaque index and advance, the table index, and the
 * match spec, then calls vnet_classify_add_del_session.
 * NOTE(review): elided listing — declarations of is_add/match/advance/
 * rv/i, the per-branch `;` bodies, and the switch on rv near the end
 * are not visible here.
 */
1824 static clib_error_t *
1825 classify_session_command_fn (vlib_main_t * vm,
1826 unformat_input_t * input,
1827 vlib_cli_command_t * cmd)
1829 vnet_classify_main_t * cm = &vnet_classify_main;
/* ~0 sentinels: "not supplied yet". */
1831 u32 table_index = ~0;
1832 u32 hit_next_index = ~0;
1833 u64 opaque_index = ~0;
1838 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
1840 if (unformat (input, "del"))
1842 else if (unformat (input, "hit-next %U", unformat_ip_next_index,
1845 else if (unformat (input, "l2-input-hit-next %U", unformat_l2_input_next_index,
1848 else if (unformat (input, "l2-output-hit-next %U", unformat_l2_output_next_index,
1851 else if (unformat (input, "acl-hit-next %U", unformat_acl_next_index,
1854 else if (unformat (input, "policer-hit-next %U",
1855 unformat_policer_next_index, &hit_next_index))
1857 else if (unformat (input, "opaque-index %lld", &opaque_index))
/* "match" must come after "table-index" on the command line: the
 * table's key geometry (passed by value here) sizes the match vector. */
1859 else if (unformat (input, "match %U", unformat_classify_match,
1860 cm, &match, table_index))
1862 else if (unformat (input, "advance %d", &advance))
1864 else if (unformat (input, "table-index %d", &table_index))
1868 /* Try registered opaque-index unformat fns */
1869 for (i = 0; i < vec_len (cm->unformat_opaque_index_fns); i++)
1871 if (unformat (input, "%U", cm->unformat_opaque_index_fns[i],
1881 if (table_index == ~0)
1882 return clib_error_return (0, "Table index required");
1884 if (is_add && match == 0)
1885 return clib_error_return (0, "Match value required");
1887 rv = vnet_classify_add_del_session (cm, table_index, match,
1889 opaque_index, advance, is_add);
1897 return clib_error_return (0, "vnet_classify_add_del_session returned %d",
1904 VLIB_CLI_COMMAND (classify_session_command, static) = {
1905 .path = "classify session",
1907 "classify session [hit-next|l2-hit-next|acl-hit-next <next_index>|"
1908 "policer-hit-next <policer_name>]"
1909 "\n table-index <nn> match [hex] [l2] [l3 ip4] [opaque-index <index>]",
1910 .function = classify_session_command_fn,
/*
 * unformat_opaque_sw_if_index
 * Registered opaque-index parser: accepts
 * "opaque-sw_if_index <interface>" and stores the resolved
 * sw_if_index into the u64 opaque value.
 * NOTE(review): elided listing — return-type line, braces, and the
 * return 1 / return 0 paths are not visible here.
 */
1914 unformat_opaque_sw_if_index (unformat_input_t * input, va_list * args)
1916 u64 * opaquep = va_arg (*args, u64 *);
1919 if (unformat (input, "opaque-sw_if_index %U", unformat_vnet_sw_interface,
1920 vnet_get_main(), &sw_if_index))
1922 *opaquep = sw_if_index;
/*
 * unformat_ip_next_node
 * Registered ip next-index parser: "node <name>" resolves a graph
 * node and wires it as a next of BOTH ip4_classify and ip6_classify,
 * asserting both additions yield the same next-index so a single
 * value can serve both address families.
 */
1929 unformat_ip_next_node (unformat_input_t * input, va_list * args)
1931 vnet_classify_main_t * cm = &vnet_classify_main;
1932 u32 * next_indexp = va_arg (*args, u32 *);
1936 if (unformat (input, "node %U", unformat_vlib_node,
1937 cm->vlib_main, &node_index))
1939 rv = next_index = vlib_node_add_next
1940 (cm->vlib_main, ip4_classify_node.index, node_index);
1941 next_index = vlib_node_add_next
1942 (cm->vlib_main, ip6_classify_node.index, node_index);
/* Both families must agree on the slot number. */
1943 ASSERT(rv == next_index);
1945 *next_indexp = next_index;
/*
 * unformat_acl_next_node
 * Registered acl next-index parser: "node <name>" resolves a graph
 * node and wires it as a next of both ip4_inacl and ip6_inacl,
 * asserting the two next-indices agree (same pattern as
 * unformat_ip_next_node, applied to the input-ACL nodes).
 */
1952 unformat_acl_next_node (unformat_input_t * input, va_list * args)
1954 vnet_classify_main_t * cm = &vnet_classify_main;
1955 u32 * next_indexp = va_arg (*args, u32 *);
1959 if (unformat (input, "node %U", unformat_vlib_node,
1960 cm->vlib_main, &node_index))
1962 rv = next_index = vlib_node_add_next
1963 (cm->vlib_main, ip4_inacl_node.index, node_index);
1964 next_index = vlib_node_add_next
1965 (cm->vlib_main, ip6_inacl_node.index, node_index);
1966 ASSERT(rv == next_index);
1968 *next_indexp = next_index;
/*
 * unformat_l2_input_next_node
 * Registered l2 next-index parser: "input-node <name>" resolves a
 * graph node and wires it as a next of l2_input_classify, returning
 * the new next-index through *next_indexp.
 */
1975 unformat_l2_input_next_node (unformat_input_t * input, va_list * args)
1977 vnet_classify_main_t * cm = &vnet_classify_main;
1978 u32 * next_indexp = va_arg (*args, u32 *);
1982 if (unformat (input, "input-node %U", unformat_vlib_node,
1983 cm->vlib_main, &node_index))
1985 next_index = vlib_node_add_next
1986 (cm->vlib_main, l2_input_classify_node.index, node_index);
1988 *next_indexp = next_index;
/*
 * unformat_l2_output_next_node
 * Registered l2 next-index parser: "output-node <name>" resolves a
 * graph node and wires it as a next of l2_output_classify, returning
 * the new next-index through *next_indexp.
 */
1995 unformat_l2_output_next_node (unformat_input_t * input, va_list * args)
1997 vnet_classify_main_t * cm = &vnet_classify_main;
1998 u32 * next_indexp = va_arg (*args, u32 *);
2002 if (unformat (input, "output-node %U", unformat_vlib_node,
2003 cm->vlib_main, &node_index))
2005 next_index = vlib_node_add_next
2006 (cm->vlib_main, l2_output_classify_node.index, node_index);
2008 *next_indexp = next_index;
2014 static clib_error_t *
2015 vnet_classify_init (vlib_main_t * vm)
2017 vnet_classify_main_t * cm = &vnet_classify_main;
2020 cm->vnet_main = vnet_get_main();
2022 vnet_classify_register_unformat_opaque_index_fn
2023 (unformat_opaque_sw_if_index);
2025 vnet_classify_register_unformat_ip_next_index_fn
2026 (unformat_ip_next_node);
2028 vnet_classify_register_unformat_l2_next_index_fn
2029 (unformat_l2_input_next_node);
2031 vnet_classify_register_unformat_l2_next_index_fn
2032 (unformat_l2_input_next_node);
2034 vnet_classify_register_unformat_l2_next_index_fn
2035 (unformat_l2_output_next_node);
2037 vnet_classify_register_unformat_acl_next_index_fn
2038 (unformat_acl_next_node);
2043 VLIB_INIT_FUNCTION (vnet_classify_init);
/*
 * test_classify_command_fn
 * Debug CLI "test classify": builds (or reuses) a classify table that
 * masks on the IPv4 source address, then adds `sessions` entries with
 * consecutive source addresses starting at `src` (default 1.0.0.10),
 * or — with "del" — looks each entry up and deletes it, reporting the
 * count. Compiled only under TEST_CODE (see #endif below).
 * NOTE(review): elided listing — declarations of sessions/buckets/tmp/
 * rv/i/hash/deleted/src, several braces and error paths, and parts of
 * the vnet_classify_new_table argument list are not visible here.
 */
2048 static clib_error_t *
2049 test_classify_command_fn (vlib_main_t * vm,
2050 unformat_input_t * input,
2051 vlib_cli_command_t * cmd)
2056 vnet_classify_table_t * t = 0;
2057 classify_data_or_mask_t * mask;
2058 classify_data_or_mask_t * data;
2059 u8 *mp = 0, *dp = 0;
2060 vnet_classify_main_t * cm = &vnet_classify_main;
2061 vnet_classify_entry_t * e;
2064 u32 table_index = ~0;
2067 u32 memory_size = 64<<20;
2069 /* Default starting address 1.0.0.10 */
2070 src.as_u32 = clib_net_to_host_u32 (0x0100000A);
2072 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
2073 if (unformat (input, "sessions %d", &sessions))
2075 else if (unformat (input, "src %U", unformat_ip4_address, &src))
2077 else if (unformat (input, "buckets %d", &buckets))
2079 else if (unformat (input, "memory-size %uM", &tmp))
2080 memory_size = tmp<<20;
2081 else if (unformat (input, "memory-size %uG", &tmp))
2082 memory_size = tmp<<30;
2083 else if (unformat (input, "del"))
2085 else if (unformat (input, "table %d", &table_index))
/* Scratch mask/data buffers, u32x4-aligned like real classify keys. */
2091 vec_validate_aligned (mp, 3 * sizeof(u32x4), sizeof(u32x4));
2092 vec_validate_aligned (dp, 3 * sizeof(u32x4), sizeof(u32x4));
2094 mask = (classify_data_or_mask_t *) mp;
2095 data = (classify_data_or_mask_t *) dp;
2097 data->ip.src_address.as_u32 = src.as_u32;
2099 /* Mask on src address */
2100 memset (&mask->ip.src_address, 0xff, 4);
/* Round the bucket count up to a power of two. */
2102 buckets = 1<<max_log2(buckets);
2104 if (table_index != ~0)
2106 if (pool_is_free_index (cm->tables, table_index))
2108 vlib_cli_output (vm, "No such table %d", table_index);
2111 t = pool_elt_at_index (cm->tables, table_index);
/* No table given: create one matching 3 vectors against `mask`. */
2118 t = vnet_classify_new_table (cm, (u8 *)mask, buckets,
2121 3 /* vectors to match */);
2122 t->miss_next_index = IP_LOOKUP_NEXT_DROP;
2123 vlib_cli_output (vm, "Create table %d", t - cm->tables);
2126 vlib_cli_output (vm, "Add %d sessions to %d buckets...",
2129 for (i = 0; i < sessions; i++)
2131 rv = vnet_classify_add_del_session (cm, t - cm->tables, (u8 *) data,
2132 IP_LOOKUP_NEXT_DROP,
2133 i+100 /* opaque_index */,
2138 clib_warning ("add: returned %d", rv);
/* Advance to the next consecutive source address (host-order +1).
 * NOTE(review): clib_net_to_host_u32 is used for the store-back where
 * clib_host_to_net_u32 would read better; both byte-swap identically,
 * so behavior is unaffected. */
2140 tmp = clib_net_to_host_u32 (data->ip.src_address.as_u32) + 1;
2141 data->ip.src_address.as_u32 = clib_net_to_host_u32 (tmp);
2148 vlib_cli_output (vm, "Must specify table index to delete sessions");
2152 vlib_cli_output (vm, "Try to delete %d sessions...", sessions);
2154 for (i = 0; i < sessions; i++)
2156 u8 * key_minus_skip;
2159 hash = vnet_classify_hash_packet (t, (u8 *) data);
2161 e = vnet_classify_find_entry (t, (u8 *) data, hash, 0 /* time_now */);
2162 /* Previous delete, perhaps... */
2165 ASSERT (e->opaque_index == (i+100));
/* add_del_session expects the full key including skipped vectors, so
 * back the entry's key pointer up by skip_n_vectors. */
2167 key_minus_skip = (u8 *)e->key;
2168 key_minus_skip -= t->skip_n_vectors * sizeof (u32x4);
2170 rv = vnet_classify_add_del_session (cm, t - cm->tables, key_minus_skip,
2171 IP_LOOKUP_NEXT_DROP,
2172 i+100 /* opaque_index */,
2176 clib_warning ("del: returned %d", rv);
2178 tmp = clib_net_to_host_u32 (data->ip.src_address.as_u32) + 1;
2179 data->ip.src_address.as_u32 = clib_net_to_host_u32 (tmp);
2183 vlib_cli_output (vm, "Deleted %d sessions...", deleted);
/* Debug CLI registration for "test classify" (TEST_CODE builds only). */
2192 VLIB_CLI_COMMAND (test_classify_command, static) = {
2193 .path = "test classify",
2195 "test classify [src <ip>] [sessions <nn>] [buckets <nn>] [table <nn>] [del]",
2196 .function = test_classify_command_fn,
2198 #endif /* TEST_CODE */