2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
15 #include <vnet/classify/vnet_classify.h>
16 #include <vnet/classify/input_acl.h>
17 #include <vnet/ip/ip.h>
18 #include <vnet/api_errno.h> /* for API error numbers */
19 #include <vnet/l2/l2_classify.h> /* for L2_INPUT_CLASSIFY_NEXT_xxx */
/* Global classifier state: pool of tables plus vectors of user-registered
   unformat functions for the various next-index namespaces. */
21 vnet_classify_main_t vnet_classify_main;
23 #if VALIDATION_SCAFFOLDING
24 /* Validation scaffolding */
/* Debug-build helper: switch to the table's private heap, run an (elided)
   heap validation, then restore the caller's heap.
   NOTE(review): this listing elides interior lines (locals, braces, the
   validation call itself) -- body shown is partial. */
25 void mv (vnet_classify_table_t * t)
29 oldheap = clib_mem_set_heap (t->mheap);
31 clib_mem_set_heap (oldheap);
/* Debug-build helper: walk every bucket / page / entry of the table, count
   busy entries, and warn if the count disagrees with t->active_elements.
   NOTE(review): interior lines (braces, the accumulation statement) are
   elided from this listing -- body shown is partial. */
34 void rogue (vnet_classify_table_t * t)
37 vnet_classify_entry_t * v, * save_v;
38 u32 active_elements = 0;
39 vnet_classify_bucket_t * b;
41 for (i = 0; i < t->nbuckets; i++)
46 save_v = vnet_classify_get_entry (t, b->offset);
47 for (j = 0; j < (1<<b->log2_pages); j++)
49 for (k = 0; k < t->entries_per_page; k++)
51 v = vnet_classify_entry_at_index
52 (t, save_v, j*t->entries_per_page + k);
54 if (vnet_classify_entry_is_busy (v))
60 if (active_elements != t->active_elements)
61 clib_warning ("found %u expected %u elts", active_elements,
/* Production build (#else branch): the validation helpers are no-ops. */
65 void mv (vnet_classify_table_t * t) { }
66 void rogue (vnet_classify_table_t * t) { }
/* Register a user-supplied unformat function tried first when parsing
   l2 next-index tokens (allows plugins to extend the CLI grammar). */
69 void vnet_classify_register_unformat_l2_next_index_fn (unformat_function_t * fn)
71 vnet_classify_main_t * cm = &vnet_classify_main;
73 vec_add1 (cm->unformat_l2_next_index_fns, fn);
/* Register a user-supplied unformat function for ip next-index tokens. */
76 void vnet_classify_register_unformat_ip_next_index_fn (unformat_function_t * fn)
78 vnet_classify_main_t * cm = &vnet_classify_main;
80 vec_add1 (cm->unformat_ip_next_index_fns, fn);
/* Register a user-supplied unformat function for acl next-index tokens. */
84 vnet_classify_register_unformat_acl_next_index_fn (unformat_function_t * fn)
86 vnet_classify_main_t * cm = &vnet_classify_main;
88 vec_add1 (cm->unformat_acl_next_index_fns, fn);
/* Register a user-supplied unformat function for policer next-index tokens. */
92 vnet_classify_register_unformat_policer_next_index_fn (unformat_function_t * fn)
94 vnet_classify_main_t * cm = &vnet_classify_main;
96 vec_add1 (cm->unformat_policer_next_index_fns, fn);
/* Register a user-supplied unformat function for opaque-index tokens. */
99 void vnet_classify_register_unformat_opaque_index_fn (unformat_function_t * fn)
101 vnet_classify_main_t * cm = &vnet_classify_main;
103 vec_add1 (cm->unformat_opaque_index_fns, fn);
/* Allocate and initialize a new classify table.
   - nbuckets is rounded up to a power of two so bucket selection can use
     hash & (nbuckets-1).
   - mask is copied into a u32x4-aligned vector of match_n_vectors entries.
   - Each table gets its own private mheap (memory_size bytes) so entries
     and working copies never contend with the main heap.
   NOTE(review): interior lines (parameter list tail, locals, return) are
   elided from this listing -- body shown is partial. */
106 vnet_classify_table_t *
107 vnet_classify_new_table (vnet_classify_main_t *cm,
108 u8 * mask, u32 nbuckets, u32 memory_size,
112 vnet_classify_table_t * t;
115 nbuckets = 1 << (max_log2 (nbuckets));
117 pool_get_aligned (cm->tables, t, CLIB_CACHE_LINE_BYTES);
118 memset(t, 0, sizeof (*t));
120 vec_validate_aligned (t->mask, match_n_vectors - 1, sizeof(u32x4));
121 clib_memcpy (t->mask, mask, match_n_vectors * sizeof (u32x4));
123 t->next_table_index = ~0;
124 t->nbuckets = nbuckets;
125 t->log2_nbuckets = max_log2 (nbuckets);
126 t->match_n_vectors = match_n_vectors;
127 t->skip_n_vectors = skip_n_vectors;
/* Two entries per page: collisions within a bucket probe linearly across
   entries_per_page slots before forcing a split. */
128 t->entries_per_page = 2;
130 t->mheap = mheap_alloc (0 /* use VM */, memory_size);
132 vec_validate_aligned (t->buckets, nbuckets - 1, CLIB_CACHE_LINE_BYTES);
133 oldheap = clib_mem_set_heap (t->mheap);
/* Writer lock lives on its own cache line to avoid false sharing. */
135 t->writer_lock = clib_mem_alloc_aligned (CLIB_CACHE_LINE_BYTES,
136 CLIB_CACHE_LINE_BYTES);
137 t->writer_lock[0] = 0;
139 clib_mem_set_heap (oldheap);
/* Delete the table at table_index, recursively deleting any chained
   next-table first.  Double frees are tolerated via the pool free check. */
143 void vnet_classify_delete_table_index (vnet_classify_main_t *cm,
146 vnet_classify_table_t * t;
148 /* Tolerate multiple frees, up to a point */
149 if (pool_is_free_index (cm->tables, table_index))
152 t = pool_elt_at_index (cm->tables, table_index);
153 if (t->next_table_index != ~0)
154 vnet_classify_delete_table_index (cm, t->next_table_index);
/* Freeing the table's private mheap releases all entries at once. */
157 vec_free (t->buckets);
158 mheap_free (t->mheap);
160 pool_put (cm->tables, t);
/* Allocate a page-set of entries ((1<<log2_pages) * entries_per_page) for a
   bucket.  Serves from the per-size freelist when possible; otherwise
   vector-allocates on the table's private heap, using the size-specialized
   entry types (foreach_size_in_u32x4) so each entry is sized for
   match_n_vectors.  New/reused storage is poisoned to 0xff, which marks
   every entry free (VNET_CLASSIFY_ENTRY_FREE is set by the 0xff fill --
   presumably; TODO confirm flag encoding).  Caller must hold writer_lock.
   NOTE(review): macro scaffolding and braces are elided from this listing. */
163 static vnet_classify_entry_t *
164 vnet_classify_entry_alloc (vnet_classify_table_t * t, u32 log2_pages)
166 vnet_classify_entry_t * rv = 0;
168 vnet_classify_entry_##size##_t * rv##size = 0;
169 foreach_size_in_u32x4;
174 ASSERT (t->writer_lock[0]);
175 if (log2_pages >= vec_len (t->freelists) || t->freelists [log2_pages] == 0)
177 oldheap = clib_mem_set_heap (t->mheap);
179 vec_validate (t->freelists, log2_pages);
181 switch(t->match_n_vectors)
183 /* Euchre the vector allocator into allocating the right sizes */
186 vec_validate_aligned \
187 (rv##size, ((1<<log2_pages)*t->entries_per_page) - 1, \
188 CLIB_CACHE_LINE_BYTES); \
189 rv = (vnet_classify_entry_t *) rv##size; \
191 foreach_size_in_u32x4;
198 clib_mem_set_heap (oldheap);
/* Freelist hit: pop the head. */
201 rv = t->freelists[log2_pages];
202 t->freelists[log2_pages] = rv->next_free;
206 ASSERT (vec_len(rv) == (1<<log2_pages)*t->entries_per_page);
208 switch (t->match_n_vectors)
213 memset (rv, 0xff, sizeof (*rv##size) * vec_len(rv)); \
215 foreach_size_in_u32x4;
/* Return a page-set of entries to the freelist bucketed by its log2 page
   count (recovered from the vector length).  Caller must hold writer_lock. */
226 vnet_classify_entry_free (vnet_classify_table_t * t,
227 vnet_classify_entry_t * v)
231 ASSERT (t->writer_lock[0]);
233 free_list_index = min_log2(vec_len(v)/t->entries_per_page);
235 ASSERT(vec_len (t->freelists) > free_list_index);
237 v->next_free = t->freelists[free_list_index];
238 t->freelists[free_list_index] = v;
/* Prepare an update of bucket b: copy its entries into this cpu's working
   copy (growing it if needed, on the table's private heap), save the bucket
   header in t->saved_bucket, then atomically point the live bucket at the
   working copy so readers keep a coherent view while the writer mutates it.
   NOTE(review): braces and some macro scaffolding are elided from this
   listing -- body shown is partial. */
241 static inline void make_working_copy
242 (vnet_classify_table_t * t, vnet_classify_bucket_t * b)
244 vnet_classify_entry_t * v;
245 vnet_classify_bucket_t working_bucket __attribute__((aligned (8)));
247 vnet_classify_entry_t * working_copy;
249 vnet_classify_entry_##size##_t * working_copy##size = 0;
250 foreach_size_in_u32x4;
252 u32 cpu_number = os_get_cpu_number();
/* First use on this cpu: extend the per-cpu working-copy vector. */
254 if (cpu_number >= vec_len (t->working_copies))
256 oldheap = clib_mem_set_heap (t->mheap);
257 vec_validate (t->working_copies, cpu_number);
258 clib_mem_set_heap (oldheap);
262 * working_copies are per-cpu so that near-simultaneous
263 * updates from multiple threads will not result in sporadic, spurious
266 working_copy = t->working_copies[cpu_number];
268 t->saved_bucket.as_u64 = b->as_u64;
269 oldheap = clib_mem_set_heap (t->mheap);
/* Grow the working copy if this bucket has more entries than it holds. */
271 if ((1<<b->log2_pages)*t->entries_per_page > vec_len (working_copy))
273 switch(t->match_n_vectors)
275 /* Euchre the vector allocator into allocating the right sizes */
278 working_copy##size = (void *) working_copy; \
279 vec_validate_aligned \
280 (working_copy##size, \
281 ((1<<b->log2_pages)*t->entries_per_page) - 1, \
282 CLIB_CACHE_LINE_BYTES); \
283 working_copy = (void *) working_copy##size; \
285 foreach_size_in_u32x4;
291 t->working_copies[cpu_number] = working_copy;
294 _vec_len(working_copy) = (1<<b->log2_pages)*t->entries_per_page;
295 clib_mem_set_heap (oldheap);
297 v = vnet_classify_get_entry (t, b->offset);
299 switch(t->match_n_vectors)
303 clib_memcpy (working_copy, v, \
304 sizeof (vnet_classify_entry_##size##_t) \
305 * (1<<b->log2_pages) \
306 * (t->entries_per_page)); \
308 foreach_size_in_u32x4 ;
/* Publish: barrier ensures the copy is complete before readers can see
   the bucket pointing at the working copy. */
315 working_bucket.as_u64 = b->as_u64;
316 working_bucket.offset = vnet_classify_get_offset (t, working_copy);
317 CLIB_MEMORY_BARRIER();
318 b->as_u64 = working_bucket.as_u64;
319 t->working_copies[cpu_number] = working_copy;
/* Double a bucket: allocate a new page-set of new_log2_pages and rehash
   every busy entry from old_values into it.  Returns the new page-set, or
   frees it and signals failure (return value elided in this listing) when a
   destination page overflows, so the caller retries with more pages. */
322 static vnet_classify_entry_t *
323 split_and_rehash (vnet_classify_table_t * t,
324 vnet_classify_entry_t * old_values,
327 vnet_classify_entry_t * new_values, * v, * new_v;
330 new_values = vnet_classify_entry_alloc (t, new_log2_pages);
332 for (i = 0; i < (vec_len (old_values)/t->entries_per_page); i++)
336 for (j = 0; j < t->entries_per_page; j++)
338 v = vnet_classify_entry_at_index
339 (t, old_values, i * t->entries_per_page + j);
341 if (vnet_classify_entry_is_busy (v))
343 /* Hack so we can use the packet hash routine */
/* The hash routine expects a pointer to the start of the skipped
   region, so back up by skip_n_vectors from the stored key. */
345 key_minus_skip = (u8 *) v->key;
346 key_minus_skip -= t->skip_n_vectors * sizeof (u32x4);
348 new_hash = vnet_classify_hash_packet (t, key_minus_skip);
349 new_hash >>= t->log2_nbuckets;
350 new_hash &= (1<<new_log2_pages) - 1;
352 for (k = 0; k < t->entries_per_page; k++)
354 new_v = vnet_classify_entry_at_index (t, new_values,
357 if (vnet_classify_entry_is_free (new_v))
359 clib_memcpy (new_v, v, sizeof (vnet_classify_entry_t)
360 + (t->match_n_vectors * sizeof (u32x4)));
361 new_v->flags &= ~(VNET_CLASSIFY_ENTRY_FREE);
365 /* Crap. Tell caller to try again */
366 vnet_classify_entry_free (t, new_values);
/* Add, replace, or delete the session add_v in table t.
   Writer side of the lock-free reader scheme: takes the spin lock, then
   (1) empty bucket -> allocate a single page and install it;
   (2) otherwise make a per-cpu working copy of the bucket, try in-place
       replace, then an empty slot, then delete-by-match;
   (3) no room -> split_and_rehash into double the pages, re-insert, and
       atomically swap the new page-set in, freeing the old one.
   Memory barriers order entry writes before bucket publication.
   NOTE(review): braces, labels and retry control flow are elided from this
   listing -- body shown is partial. */
376 int vnet_classify_add_del (vnet_classify_table_t * t,
377 vnet_classify_entry_t * add_v,
381 vnet_classify_bucket_t * b, tmp_b;
382 vnet_classify_entry_t * v, * new_v, * save_new_v, * working_copy, * save_v;
388 u32 cpu_number = os_get_cpu_number();
391 ASSERT ((add_v->flags & VNET_CLASSIFY_ENTRY_FREE) == 0);
/* Hash over the full (skip + match) region, as the data path does. */
393 key_minus_skip = (u8 *) add_v->key;
394 key_minus_skip -= t->skip_n_vectors * sizeof (u32x4);
396 hash = vnet_classify_hash_packet (t, key_minus_skip);
398 bucket_index = hash & (t->nbuckets-1);
399 b = &t->buckets[bucket_index];
401 hash >>= t->log2_nbuckets;
/* Spin until we own the single writer lock. */
403 while (__sync_lock_test_and_set (t->writer_lock, 1))
406 /* First elt in the bucket? */
415 v = vnet_classify_entry_alloc (t, 0 /* new_log2_pages */);
416 clib_memcpy (v, add_v, sizeof (vnet_classify_entry_t) +
417 t->match_n_vectors * sizeof (u32x4));
418 v->flags &= ~(VNET_CLASSIFY_ENTRY_FREE);
421 tmp_b.offset = vnet_classify_get_offset (t, v);
423 b->as_u64 = tmp_b.as_u64;
424 t->active_elements ++;
429 make_working_copy (t, b);
431 save_v = vnet_classify_get_entry (t, t->saved_bucket.offset);
432 value_index = hash & ((1<<t->saved_bucket.log2_pages)-1);
437 * For obvious (in hindsight) reasons, see if we're supposed to
438 * replace an existing key, then look for an empty slot.
441 for (i = 0; i < t->entries_per_page; i++)
443 v = vnet_classify_entry_at_index (t, save_v, value_index + i);
445 if (!memcmp (v->key, add_v->key, t->match_n_vectors * sizeof (u32x4)))
447 clib_memcpy (v, add_v, sizeof (vnet_classify_entry_t) +
448 t->match_n_vectors * sizeof(u32x4));
449 v->flags &= ~(VNET_CLASSIFY_ENTRY_FREE);
451 CLIB_MEMORY_BARRIER();
452 /* Restore the previous (k,v) pairs */
453 b->as_u64 = t->saved_bucket.as_u64;
/* No existing key matched: take the first free slot in the page. */
457 for (i = 0; i < t->entries_per_page; i++)
459 v = vnet_classify_entry_at_index (t, save_v, value_index + i);
461 if (vnet_classify_entry_is_free (v))
463 clib_memcpy (v, add_v, sizeof (vnet_classify_entry_t) +
464 t->match_n_vectors * sizeof(u32x4));
465 v->flags &= ~(VNET_CLASSIFY_ENTRY_FREE);
466 CLIB_MEMORY_BARRIER();
467 b->as_u64 = t->saved_bucket.as_u64;
468 t->active_elements ++;
472 /* no room at the inn... split case... */
/* Delete path (presumably guarded by !is_add upstream -- elided here):
   poison the matching entry and mark it free. */
476 for (i = 0; i < t->entries_per_page; i++)
478 v = vnet_classify_entry_at_index (t, save_v, value_index + i);
480 if (!memcmp (v->key, add_v->key, t->match_n_vectors * sizeof (u32x4)))
482 memset (v, 0xff, sizeof (vnet_classify_entry_t) +
483 t->match_n_vectors * sizeof(u32x4));
484 v->flags |= VNET_CLASSIFY_ENTRY_FREE;
485 CLIB_MEMORY_BARRIER();
486 b->as_u64 = t->saved_bucket.as_u64;
487 t->active_elements --;
492 b->as_u64 = t->saved_bucket.as_u64;
/* Split: double the page count and rehash. */
496 new_log2_pages = t->saved_bucket.log2_pages + 1;
499 working_copy = t->working_copies[cpu_number];
500 new_v = split_and_rehash (t, working_copy, new_log2_pages);
508 /* Try to add the new entry */
511 key_minus_skip = (u8 *) add_v->key;
512 key_minus_skip -= t->skip_n_vectors * sizeof (u32x4);
514 new_hash = vnet_classify_hash_packet_inline (t, key_minus_skip);
515 new_hash >>= t->log2_nbuckets;
516 new_hash &= (1<<min_log2((vec_len(new_v)/t->entries_per_page))) - 1;
518 for (i = 0; i < t->entries_per_page; i++)
520 new_v = vnet_classify_entry_at_index (t, save_new_v, new_hash + i);
522 if (vnet_classify_entry_is_free (new_v))
524 clib_memcpy (new_v, add_v, sizeof (vnet_classify_entry_t) +
525 t->match_n_vectors * sizeof(u32x4));
526 new_v->flags &= ~(VNET_CLASSIFY_ENTRY_FREE);
530 /* Crap. Try again */
532 vnet_classify_entry_free (t, save_new_v);
/* Publish the split page-set and free the old one. */
536 tmp_b.log2_pages = min_log2 (vec_len (save_new_v)/t->entries_per_page);
537 tmp_b.offset = vnet_classify_get_offset (t, save_new_v);
538 CLIB_MEMORY_BARRIER();
539 b->as_u64 = tmp_b.as_u64;
540 t->active_elements ++;
541 v = vnet_classify_get_entry (t, t->saved_bucket.offset);
542 vnet_classify_entry_free (t, v);
545 CLIB_MEMORY_BARRIER();
546 t->writer_lock[0] = 0;
/* Packed overlay used to build masks/matches starting at the Ethernet
   header (interior members elided in this listing). */
551 typedef CLIB_PACKED(struct {
552 ethernet_header_t eh;
554 }) classify_data_or_mask_t;
/* Out-of-line wrapper for the inline hash routine (for callers outside
   the data-path fast path). */
556 u64 vnet_classify_hash_packet (vnet_classify_table_t * t, u8 * h)
558 return vnet_classify_hash_packet_inline (t, h);
/* Out-of-line wrapper for the inline lookup routine; 'now' is used for
   hit-time bookkeeping by the inline implementation. */
561 vnet_classify_entry_t *
562 vnet_classify_find_entry (vnet_classify_table_t * t,
563 u8 * h, u64 hash, f64 now)
565 return vnet_classify_find_entry_inline (t, h, hash, now);
/* format() helper: render one classify entry -- heap offset, next_index,
   advance, opaque, hex key, and hit stats (or "free"). */
568 static u8 * format_classify_entry (u8 * s, va_list * args)
570 vnet_classify_table_t * t = va_arg (*args, vnet_classify_table_t *);
571 vnet_classify_entry_t * e = va_arg (*args, vnet_classify_entry_t *);
574 (s, "[%u]: next_index %d advance %d opaque %d\n",
575 vnet_classify_get_offset (t, e), e->next_index, e->advance,
579 s = format (s, " k: %U\n", format_hex_bytes, e->key,
580 t->match_n_vectors * sizeof(u32x4));
582 if (vnet_classify_entry_is_busy (e))
583 s = format (s, " hits %lld, last_heard %.2f\n",
584 e->hits, e->last_heard);
586 s = format (s, " entry is free\n");
/* format() helper: dump every bucket of the table; in verbose mode also
   renders each entry (busy or empty) via format_classify_entry, and
   reports total active elements and freelist count.
   NOTE(review): some guard/brace lines are elided from this listing. */
590 u8 * format_classify_table (u8 * s, va_list * args)
592 vnet_classify_table_t * t = va_arg (*args, vnet_classify_table_t *);
593 int verbose = va_arg (*args, int);
594 vnet_classify_bucket_t * b;
595 vnet_classify_entry_t * v, * save_v;
597 u64 active_elements = 0;
599 for (i = 0; i < t->nbuckets; i++)
605 s = format (s, "[%d]: empty\n", i);
611 s = format (s, "[%d]: heap offset %d, len %d\n", i,
612 b->offset, (1<<b->log2_pages));
615 save_v = vnet_classify_get_entry (t, b->offset);
616 for (j = 0; j < (1<<b->log2_pages); j++)
618 for (k = 0; k < t->entries_per_page; k++)
621 v = vnet_classify_entry_at_index (t, save_v,
622 j*t->entries_per_page + k);
624 if (vnet_classify_entry_is_free (v))
627 s = format (s, " %d: empty\n",
628 j * t->entries_per_page + k);
633 s = format (s, " %d: %U\n",
634 j * t->entries_per_page + k,
635 format_classify_entry, t, v);
642 s = format (s, " %lld active elements\n", active_elements);
643 s = format (s, " %d free lists\n", vec_len (t->freelists));
/* API-level add/delete of a classify table.  On add: validates memory_size,
   creates the table, and returns its pool index via *table_index.  On
   delete (elided branch): tears down the table at *table_index.
   Returns 0 or a VNET_API_ERROR_* code. */
647 int vnet_classify_add_del_table (vnet_classify_main_t * cm,
653 u32 next_table_index,
658 vnet_classify_table_t * t;
663 if (memory_size == 0)
664 return VNET_API_ERROR_INVALID_MEMORY_SIZE;
667 return VNET_API_ERROR_INVALID_VALUE;
669 t = vnet_classify_new_table (cm, mask, nbuckets, memory_size,
671 t->next_table_index = next_table_index;
672 t->miss_next_index = miss_next_index;
673 *table_index = t - cm->tables;
677 vnet_classify_delete_table_index (cm, *table_index);
681 #define foreach_ip4_proto_field \
/* unformat() callback: parse a list of ip4 field names ("src", "dst",
   "proto", per-field tokens from foreach_ip4_proto_field, ...) and build a
   byte mask the size of an ip4 header with 0xff over each named field.
   Version/header-length are packed into the shared byte by hand. */
691 uword unformat_ip4_mask (unformat_input_t * input, va_list * args)
693 u8 ** maskp = va_arg (*args, u8 **);
695 u8 found_something = 0;
699 foreach_ip4_proto_field;
705 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
707 if (unformat (input, "version"))
709 else if (unformat (input, "hdr_length"))
711 else if (unformat (input, "src"))
713 else if (unformat (input, "dst"))
715 else if (unformat (input, "proto"))
718 #define _(a) else if (unformat (input, #a)) a=1;
719 foreach_ip4_proto_field
725 #define _(a) found_something += a;
726 foreach_ip4_proto_field;
/* Nothing recognized: reject so the caller's grammar can fail cleanly. */
729 if (found_something == 0)
732 vec_validate (mask, sizeof (*ip) - 1);
734 ip = (ip4_header_t *) mask;
736 #define _(a) if (a) memset (&ip->a, 0xff, sizeof (ip->a));
737 foreach_ip4_proto_field;
740 ip->ip_version_and_header_length = 0;
743 ip->ip_version_and_header_length |= 0xF0;
746 ip->ip_version_and_header_length |= 0x0F;
752 #define foreach_ip6_proto_field \
/* unformat() callback: parse ip6 field names and build a byte mask the size
   of an ip6 header.  Version / traffic-class / flow-label share one u32,
   assembled by hand and stored in network byte order. */
759 uword unformat_ip6_mask (unformat_input_t * input, va_list * args)
761 u8 ** maskp = va_arg (*args, u8 **);
763 u8 found_something = 0;
765 u32 ip_version_traffic_class_and_flow_label;
768 foreach_ip6_proto_field;
771 u8 traffic_class = 0;
774 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
776 if (unformat (input, "version"))
778 else if (unformat (input, "traffic-class"))
780 else if (unformat (input, "flow-label"))
782 else if (unformat (input, "src"))
784 else if (unformat (input, "dst"))
786 else if (unformat (input, "proto"))
789 #define _(a) else if (unformat (input, #a)) a=1;
790 foreach_ip6_proto_field
796 #define _(a) found_something += a;
797 foreach_ip6_proto_field;
800 if (found_something == 0)
803 vec_validate (mask, sizeof (*ip) - 1);
805 ip = (ip6_header_t *) mask;
807 #define _(a) if (a) memset (&ip->a, 0xff, sizeof (ip->a));
808 foreach_ip6_proto_field;
/* Assemble the shared version/tclass/flow-label mask word. */
811 ip_version_traffic_class_and_flow_label = 0;
814 ip_version_traffic_class_and_flow_label |= 0xF0000000;
817 ip_version_traffic_class_and_flow_label |= 0x0FF00000;
820 ip_version_traffic_class_and_flow_label |= 0x000FFFFF;
822 ip->ip_version_traffic_class_and_flow_label =
823 clib_host_to_net_u32 (ip_version_traffic_class_and_flow_label);
/* unformat() callback: dispatch "ip4 ..." / "ip6 ..." to the respective
   mask parsers, writing the result through maskp. */
829 uword unformat_l3_mask (unformat_input_t * input, va_list * args)
831 u8 ** maskp = va_arg (*args, u8 **);
833 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
834 if (unformat (input, "ip4 %U", unformat_ip4_mask, maskp))
836 else if (unformat (input, "ip6 %U", unformat_ip6_mask, maskp))
/* unformat() callback: parse l2 field names (src/dst/proto/tags/cos/
   dot1q/dot1ad) and build an Ethernet-header byte mask.  The header length
   grows by 4 bytes per present VLAN tag, which shifts the ethertype and
   tag offsets (hence the hard-coded mask[] indices below).
   NOTE(review): the flag declarations and several branch bodies are elided
   from this listing -- body shown is partial. */
844 uword unformat_l2_mask (unformat_input_t * input, va_list * args)
846 u8 ** maskp = va_arg (*args, u8 **);
861 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
862 if (unformat (input, "src"))
864 else if (unformat (input, "dst"))
866 else if (unformat (input, "proto"))
868 else if (unformat (input, "tag1"))
870 else if (unformat (input, "tag2"))
872 else if (unformat (input, "ignore-tag1"))
874 else if (unformat (input, "ignore-tag2"))
876 else if (unformat (input, "cos1"))
878 else if (unformat (input, "cos2"))
880 else if (unformat (input, "dot1q"))
882 else if (unformat (input, "dot1ad"))
887 if ((src + dst + proto + tag1 + tag2 + dot1q + dot1ad +
888 ignore_tag1 + ignore_tag2 + cos1 + cos2) == 0)
891 if (tag1 || ignore_tag1 || cos1 || dot1q)
893 if (tag2 || ignore_tag2 || cos2 || dot1ad)
896 vec_validate (mask, len-1)
/* dst MAC occupies bytes 0-5, src MAC bytes 6-11. */
899 memset (mask, 0xff, 6);
902 memset (mask + 6, 0xff, 6);
/* Ethertype offset depends on tag count: 20/21 (two tags), 16/17 (one),
   12/13 (untagged). */
915 mask[21] = mask [20] = 0xff;
936 mask[16] = mask [17] = 0xff;
945 mask[12] = mask [13] = 0xff;
/* unformat() callback: top-level mask parser.  Accepts "hex", "l2", and
   "l3" sub-masks, prepends a 14-byte Ethernet header to an l3-only mask,
   then computes the (skip, match) u32x4-vector parameters: skip = leading
   all-zero vectors (deleted from the mask), match = trailing significant
   vectors after padding to a whole number of u32x4s. */
953 vnet_classify_main_t * CLIB_UNUSED(cm)
954 = va_arg (*args, vnet_classify_main_t *);
955 u8 ** maskp = va_arg (*args, u8 **);
956 u32 * skipp = va_arg (*args, u32 *);
957 u32 * matchp = va_arg (*args, u32 *);
964 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
965 if (unformat (input, "hex %U", unformat_hex_string, &mask))
967 else if (unformat (input, "l2 %U", unformat_l2_mask, &l2))
969 else if (unformat (input, "l3 %U", unformat_l3_mask, &l3))
975 if (mask || l2 || l3)
979 /* "With a free Ethernet header in every package" */
981 vec_validate (l2, 13);
985 vec_append (mask, l3);
990 /* Scan forward looking for the first significant mask octet */
991 for (i = 0; i < vec_len (mask); i++)
995 /* compute (skip, match) params */
996 *skipp = i / sizeof(u32x4);
997 vec_delete (mask, *skipp * sizeof(u32x4), 0);
999 /* Pad mask to an even multiple of the vector size */
1000 while (vec_len (mask) % sizeof (u32x4))
1003 match = vec_len (mask) / sizeof (u32x4);
/* Trim trailing all-zero vectors from the match count. */
1005 for (i = match*sizeof(u32x4); i > 0; i-= sizeof(u32x4))
1007 u64 *tmp = (u64 *)(mask + (i-sizeof(u32x4)));
1008 if (*tmp || *(tmp+1))
1013 clib_warning ("BUG: match 0");
1015 _vec_len (mask) = match * sizeof(u32x4);
1026 #define foreach_l2_input_next \
1028 _(ethernet, ETHERNET_INPUT) \
/* unformat() callback: parse an l2-input classify next-node.  Order:
   plugin-registered fns first (override), then the built-in name table,
   then a bare numeric index. */
1033 uword unformat_l2_input_next_index (unformat_input_t * input, va_list * args)
1035 vnet_classify_main_t * cm = &vnet_classify_main;
1036 u32 * miss_next_indexp = va_arg (*args, u32 *);
1041 /* First try registered unformat fns, allowing override... */
1042 for (i = 0; i < vec_len (cm->unformat_l2_next_index_fns); i++)
1044 if (unformat (input, "%U", cm->unformat_l2_next_index_fns[i], &tmp))
1052 if (unformat (input, #n)) { next_index = L2_INPUT_CLASSIFY_NEXT_##N; goto out;}
1053 foreach_l2_input_next;
1056 if (unformat (input, "%d", &tmp))
1065 *miss_next_indexp = next_index;
1069 #define foreach_l2_output_next \
/* unformat() callback: parse an l2-output classify next-node; same
   structure as the l2-input variant.
   NOTE(review): this loop walks unformat_l2_next_index_fns -- the same
   vector as the input-direction parser.  Looks intentional (one shared
   l2 registration namespace) but worth confirming there is no separate
   output-direction fn vector. */
1072 uword unformat_l2_output_next_index (unformat_input_t * input, va_list * args)
1074 vnet_classify_main_t * cm = &vnet_classify_main;
1075 u32 * miss_next_indexp = va_arg (*args, u32 *);
1080 /* First try registered unformat fns, allowing override... */
1081 for (i = 0; i < vec_len (cm->unformat_l2_next_index_fns); i++)
1083 if (unformat (input, "%U", cm->unformat_l2_next_index_fns[i], &tmp))
1091 if (unformat (input, #n)) { next_index = L2_OUTPUT_CLASSIFY_NEXT_##N; goto out;}
1092 foreach_l2_output_next;
1095 if (unformat (input, "%d", &tmp))
1104 *miss_next_indexp = next_index;
1108 #define foreach_ip_next \
/* unformat() callback: parse an ip-lookup next-node name or numeric
   index; registered fns tried first. */
1114 uword unformat_ip_next_index (unformat_input_t * input, va_list * args)
1116 u32 * miss_next_indexp = va_arg (*args, u32 *);
1117 vnet_classify_main_t * cm = &vnet_classify_main;
1122 /* First try registered unformat fns, allowing override... */
1123 for (i = 0; i < vec_len (cm->unformat_ip_next_index_fns); i++)
1125 if (unformat (input, "%U", cm->unformat_ip_next_index_fns[i], &tmp))
1133 if (unformat (input, #n)) { next_index = IP_LOOKUP_NEXT_##N; goto out;}
1137 if (unformat (input, "%d", &tmp))
1146 *miss_next_indexp = next_index;
1150 #define foreach_acl_next \
/* unformat() callback: parse an ACL next-node -- registered fns, then the
   built-in name table, then "permit", then a numeric index. */
1153 uword unformat_acl_next_index (unformat_input_t * input, va_list * args)
1155 u32 * next_indexp = va_arg (*args, u32 *);
1156 vnet_classify_main_t * cm = &vnet_classify_main;
1161 /* First try registered unformat fns, allowing override... */
1162 for (i = 0; i < vec_len (cm->unformat_acl_next_index_fns); i++)
1164 if (unformat (input, "%U", cm->unformat_acl_next_index_fns[i], &tmp))
1172 if (unformat (input, #n)) { next_index = ACL_NEXT_INDEX_##N; goto out;}
1176 if (unformat (input, "permit"))
1181 else if (unformat (input, "%d", &tmp))
1190 *next_indexp = next_index;
/* unformat() callback: parse a policer next-node -- registered fns first,
   then a numeric index only (no built-in name table). */
1194 uword unformat_policer_next_index (unformat_input_t * input, va_list * args)
1196 u32 * next_indexp = va_arg (*args, u32 *);
1197 vnet_classify_main_t * cm = &vnet_classify_main;
1202 /* First try registered unformat fns, allowing override... */
1203 for (i = 0; i < vec_len (cm->unformat_policer_next_index_fns); i++)
1205 if (unformat (input, "%U", cm->unformat_policer_next_index_fns[i], &tmp))
1212 if (unformat (input, "%d", &tmp))
1221 *next_indexp = next_index;
/* CLI handler for "classify table": parse table parameters (buckets, mask,
   skip/match, memory size, next-table, miss-next variants, del), validate
   required arguments, then call vnet_classify_add_del_table.
   NOTE(review): locals, the is_add default, and the success return are
   elided from this listing -- body shown is partial. */
1225 static clib_error_t *
1226 classify_table_command_fn (vlib_main_t * vm,
1227 unformat_input_t * input,
1228 vlib_cli_command_t * cmd)
1234 u32 table_index = ~0;
1235 u32 next_table_index = ~0;
1236 u32 miss_next_index = ~0;
/* Default table heap: 2 MB. */
1237 u32 memory_size = 2<<20;
1241 vnet_classify_main_t * cm = &vnet_classify_main;
1244 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
1245 if (unformat (input, "del"))
1247 else if (unformat (input, "buckets %d", &nbuckets))
1249 else if (unformat (input, "skip %d", &skip))
1251 else if (unformat (input, "match %d", &match))
1253 else if (unformat (input, "table %d", &table_index))
1255 else if (unformat (input, "mask %U", unformat_classify_mask,
1256 cm, &mask, &skip, &match))
1258 else if (unformat (input, "memory-size %uM", &tmp))
1259 memory_size = tmp<<20;
1260 else if (unformat (input, "memory-size %uG", &tmp))
1261 memory_size = tmp<<30;
1262 else if (unformat (input, "next-table %d", &next_table_index))
1264 else if (unformat (input, "miss-next %U", unformat_ip_next_index,
1267 else if (unformat (input, "l2-input-miss-next %U", unformat_l2_input_next_index,
1270 else if (unformat (input, "l2-output-miss-next %U", unformat_l2_output_next_index,
1273 else if (unformat (input, "acl-miss-next %U", unformat_acl_next_index,
/* Argument validation: mask/skip/match required on add, table index on
   delete. */
1281 if (is_add && mask == 0)
1282 return clib_error_return (0, "Mask required");
1284 if (is_add && skip == ~0)
1285 return clib_error_return (0, "skip count required");
1287 if (is_add && match == ~0)
1288 return clib_error_return (0, "match count required");
1290 if (!is_add && table_index == ~0)
1291 return clib_error_return (0, "table index required for delete");
1293 rv = vnet_classify_add_del_table (cm, mask, nbuckets, memory_size,
1294 skip, match, next_table_index, miss_next_index,
1295 &table_index, is_add);
1302 return clib_error_return (0, "vnet_classify_add_del_table returned %d",
/* CLI registration for "classify table".
   NOTE(review): the help text lists "l2-miss_next" but the handler parses
   "l2-input-miss-next" / "l2-output-miss-next" -- help looks stale;
   confirm against the handler before relying on it. */
1308 VLIB_CLI_COMMAND (classify_table, static) = {
1309 .path = "classify table",
1311 "classify table [miss-next|l2-miss_next|acl-miss-next <next_index>]"
1312 "\n mask <mask-value> buckets <nn> [skip <n>] [match <n>] [del]",
1313 .function = classify_table_command_fn,
/* format() helper: one row per table (index, sessions, next table, miss
   node); index == ~0 prints the header row (guard elided in this listing).
   Verbose mode appends heap stats, geometry, the mask, and the full
   per-bucket dump. */
1316 static u8 * format_vnet_classify_table (u8 * s, va_list * args)
1318 vnet_classify_main_t * cm = va_arg (*args, vnet_classify_main_t *);
1319 int verbose = va_arg (*args, int);
1320 u32 index = va_arg (*args, u32);
1321 vnet_classify_table_t * t;
1325 s = format (s, "%10s%10s%10s%10s", "TableIdx", "Sessions", "NextTbl",
1326 "NextNode", verbose ? "Details" : "");
1330 t = pool_elt_at_index (cm->tables, index);
1331 s = format (s, "%10u%10d%10d%10d", index, t->active_elements,
1332 t->next_table_index, t->miss_next_index);
1334 s = format (s, "\n Heap: %U", format_mheap, t->mheap, 0 /*verbose*/);
1336 s = format (s, "\n nbuckets %d, skip %d match %d",
1337 t->nbuckets, t->skip_n_vectors, t->match_n_vectors);
1338 s = format (s, "\n mask %U", format_hex_bytes, t->mask,
1339 t->match_n_vectors * sizeof (u32x4));
1344 s = format (s, "\n%U", format_classify_table, t, verbose);
/* CLI handler for "show classify tables": collect matching table indices
   (all, or just "index <n>"), print the header row then one row per table;
   "verbose" enables the detailed per-table dump. */
1349 static clib_error_t *
1350 show_classify_tables_command_fn (vlib_main_t * vm,
1351 unformat_input_t * input,
1352 vlib_cli_command_t * cmd)
1354 vnet_classify_main_t * cm = &vnet_classify_main;
1355 vnet_classify_table_t * t;
1356 u32 match_index = ~0;
1361 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
1363 if (unformat (input, "index %d", &match_index))
1365 else if (unformat (input, "verbose %d", &verbose))
1367 else if (unformat (input, "verbose"))
1373 pool_foreach (t, cm->tables,
1375 if (match_index == ~0 || (match_index == t - cm->tables))
1376 vec_add1 (indices, t - cm->tables);
1379 if (vec_len(indices))
/* First call (index ~0, presumably -- argument elided here) prints the
   header; then one call per collected index. */
1381 vlib_cli_output (vm, "%U", format_vnet_classify_table, cm, verbose,
1383 for (i = 0; i < vec_len (indices); i++)
1384 vlib_cli_output (vm, "%U", format_vnet_classify_table, cm,
1385 verbose, indices[i]);
1388 vlib_cli_output (vm, "No classifier tables configured");
/* CLI registration for "show classify tables". */
1395 VLIB_CLI_COMMAND (show_classify_table_command, static) = {
1396 .path = "show classify tables",
1397 .short_help = "show classify tables [index <nn>]",
1398 .function = show_classify_tables_command_fn,
/* unformat() callback: parse "field value" pairs and build a concrete ip4
   header match blob (the value counterpart of unformat_ip4_mask).  The
   vector is u32x4-aligned because it is later fed to the vector compare
   routines. */
1401 uword unformat_ip4_match (unformat_input_t * input, va_list * args)
1403 u8 ** matchp = va_arg (*args, u8 **);
1410 int src = 0, dst = 0;
1411 ip4_address_t src_val, dst_val;
1418 int fragment_id = 0;
1419 u32 fragment_id_val;
1425 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
1427 if (unformat (input, "version %d", &version_val))
1429 else if (unformat (input, "hdr_length %d", &hdr_length_val))
1431 else if (unformat (input, "src %U", unformat_ip4_address, &src_val))
1433 else if (unformat (input, "dst %U", unformat_ip4_address, &dst_val))
1435 else if (unformat (input, "proto %d", &proto_val))
1437 else if (unformat (input, "tos %d", &tos_val))
1439 else if (unformat (input, "length %d", &length_val))
1441 else if (unformat (input, "fragment_id %d", &fragment_id_val))
1443 else if (unformat (input, "ttl %d", &ttl_val))
1445 else if (unformat (input, "checksum %d", &checksum_val))
1451 if (version + hdr_length + src + dst + proto + tos + length + fragment_id
1452 + ttl + checksum == 0)
1456 * Aligned because we use the real comparison functions
1458 vec_validate_aligned (match, sizeof (*ip) - 1, sizeof(u32x4));
1460 ip = (ip4_header_t *) match;
1462 /* These are realistically matched in practice */
1464 ip->src_address.as_u32 = src_val.as_u32;
1467 ip->dst_address.as_u32 = dst_val.as_u32;
1470 ip->protocol = proto_val;
1473 /* These are not, but they're included for completeness */
1475 ip->ip_version_and_header_length |= (version_val & 0xF)<<4;
1478 ip->ip_version_and_header_length |= (hdr_length_val & 0xF);
1484 ip->length = length_val;
1490 ip->checksum = checksum_val;
/* unformat() callback: parse "field value" pairs and build a concrete ip6
   header match blob.  Version / traffic-class / flow-label are packed into
   the shared u32 and stored in network byte order. */
1496 uword unformat_ip6_match (unformat_input_t * input, va_list * args)
1498 u8 ** matchp = va_arg (*args, u8 **);
1503 u8 traffic_class = 0;
1504 u32 traffic_class_val;
1507 int src = 0, dst = 0;
1508 ip6_address_t src_val, dst_val;
1511 int payload_length = 0;
1512 u32 payload_length_val;
1515 u32 ip_version_traffic_class_and_flow_label;
1517 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
1519 if (unformat (input, "version %d", &version_val))
1521 else if (unformat (input, "traffic_class %d", &traffic_class_val))
1523 else if (unformat (input, "flow_label %d", &flow_label_val))
1525 else if (unformat (input, "src %U", unformat_ip6_address, &src_val))
1527 else if (unformat (input, "dst %U", unformat_ip6_address, &dst_val))
1529 else if (unformat (input, "proto %d", &proto_val))
1531 else if (unformat (input, "payload_length %d", &payload_length_val))
1533 else if (unformat (input, "hop_limit %d", &hop_limit_val))
1539 if (version + traffic_class + flow_label + src + dst + proto +
1540 payload_length + hop_limit == 0)
1544 * Aligned because we use the real comparison functions
1546 vec_validate_aligned (match, sizeof (*ip) - 1, sizeof(u32x4));
1548 ip = (ip6_header_t *) match;
1551 clib_memcpy (&ip->src_address, &src_val, sizeof (ip->src_address));
1554 clib_memcpy (&ip->dst_address, &dst_val, sizeof (ip->dst_address));
1557 ip->protocol = proto_val;
/* Pack version (4 bits), traffic class (8 bits), flow label (20 bits). */
1559 ip_version_traffic_class_and_flow_label = 0;
1562 ip_version_traffic_class_and_flow_label |= (version_val & 0xF) << 28;
1565 ip_version_traffic_class_and_flow_label |= (traffic_class_val & 0xFF) << 20;
1568 ip_version_traffic_class_and_flow_label |= (flow_label_val & 0xFFFFF);
1570 ip->ip_version_traffic_class_and_flow_label =
1571 clib_host_to_net_u32 (ip_version_traffic_class_and_flow_label);
1574 ip->payload_length = clib_host_to_net_u16 (payload_length_val);
1577 ip->hop_limit = hop_limit_val;
/* unformat() callback: dispatch "ip4 ..." / "ip6 ..." to the respective
   match parsers, writing the result through matchp. */
1583 uword unformat_l3_match (unformat_input_t * input, va_list * args)
1585 u8 ** matchp = va_arg (*args, u8 **);
1587 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
1588 if (unformat (input, "ip4 %U", unformat_ip4_match, matchp))
1590 else if (unformat (input, "ip6 %U", unformat_ip6_match, matchp))
/* unformat() callback: parse a decimal VLAN id into a 2-byte big-endian
   tag; the 0x0F mask keeps only the low nibble of the high byte (12-bit
   VID), zeroing the PCP/DEI bits. */
1599 uword unformat_vlan_tag (unformat_input_t * input, va_list * args)
1601 u8 * tagp = va_arg (*args, u8 *);
1604 if (unformat(input, "%d", &tag))
1606 tagp[0] = (tag>>8) & 0x0F;
1607 tagp[1] = tag & 0xFF;
/* unformat() callback: parse concrete l2 field values (the value
   counterpart of unformat_l2_mask) and build an Ethernet header match
   blob.  Offsets of the ethertype / tags / cos bits shift by 4 bytes per
   present VLAN tag, mirroring the mask builder.
   NOTE(review): the flag/value declarations and several branch bodies are
   elided from this listing -- body shown is partial. */
1614 uword unformat_l2_match (unformat_input_t * input, va_list * args)
1616 u8 ** matchp = va_arg (*args, u8 **);
1636 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
1637 if (unformat (input, "src %U", unformat_ethernet_address, &src_val))
1639 else if (unformat (input, "dst %U", unformat_ethernet_address, &dst_val))
1641 else if (unformat (input, "proto %U",
1642 unformat_ethernet_type_host_byte_order, &proto_val))
1644 else if (unformat (input, "tag1 %U", unformat_vlan_tag, tag1_val))
1646 else if (unformat (input, "tag2 %U", unformat_vlan_tag, tag2_val))
1648 else if (unformat (input, "ignore-tag1"))
1650 else if (unformat (input, "ignore-tag2"))
1652 else if (unformat (input, "cos1 %d", &cos1_val))
1654 else if (unformat (input, "cos2 %d", &cos2_val))
1659 if ((src + dst + proto + tag1 + tag2 +
1660 ignore_tag1 + ignore_tag2 + cos1 + cos2) == 0)
1663 if (tag1 || ignore_tag1 || cos1)
1665 if (tag2 || ignore_tag2 || cos2)
1668 vec_validate_aligned (match, len-1, sizeof(u32x4));
/* dst MAC at bytes 0-5, src MAC at bytes 6-11. */
1671 clib_memcpy (match, dst_val, 6);
1674 clib_memcpy (match + 6, src_val, 6);
1678 /* inner vlan tag */
1679 match[19] = tag2_val[1];
1680 match[18] = tag2_val[0];
1682 match [18] |= (cos2_val & 0x7) << 5;
/* Double-tagged: ethertype at 20/21. */
1685 match[21] = proto_val & 0xff;
1686 match[20] = proto_val >> 8;
1690 match [15] = tag1_val[1];
1691 match [14] = tag1_val[0];
1694 match [14] |= (cos1_val & 0x7) << 5;
/* Single-tagged: tag at 14/15, ethertype at 16/17. */
1700 match [15] = tag1_val[1];
1701 match [14] = tag1_val[0];
1704 match[17] = proto_val & 0xff;
1705 match[16] = proto_val >> 8;
1708 match [14] |= (cos1_val & 0x7) << 5;
1714 match [18] |= (cos2_val & 0x7) << 5;
1716 match [14] |= (cos1_val & 0x7) << 5;
/* Untagged: ethertype at 12/13. */
1719 match[13] = proto_val & 0xff;
1720 match[12] = proto_val >> 8;
/* unformat callback: build a complete classifier match vector for the
 * table identified by table_index.  Accepts raw "hex", "l2 ..." and
 * "l3 ..." components, concatenates them, then pads/truncates the
 * result to exactly (match_n_vectors + skip_n_vectors) * 16 bytes as
 * required by the table's geometry.
 * NOTE(review): several interior lines (returns, the l2-padding else
 * branch, vec_append of l2, final alignment arg) are missing from
 * this extract. */
1728 uword unformat_classify_match (unformat_input_t * input, va_list * args)
1730 vnet_classify_main_t * cm = va_arg (*args, vnet_classify_main_t *);
1731 u8 ** matchp = va_arg (*args, u8 **);
1732 u32 table_index = va_arg (*args, u32);
1733 vnet_classify_table_t * t;
/* The table must exist -- its geometry dictates the final vector size. */
1739 if (pool_is_free_index (cm->tables, table_index))
1742 t = pool_elt_at_index (cm->tables, table_index);
1744 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
1745 if (unformat (input, "hex %U", unformat_hex_string, &match))
1747 else if (unformat (input, "l2 %U", unformat_l2_match, &l2))
1749 else if (unformat (input, "l3 %U", unformat_l3_match, &l3))
1755 if (match || l2 || l3)
1759 /* "Win a free Ethernet header in every packet" */
/* If l3 was given without l2, synthesize a zeroed 14-byte Ethernet
 * header so the l3 key lands at the right offset. */
1761 vec_validate_aligned (l2, 13, sizeof(u32x4));
1765 vec_append_aligned (match, l3, sizeof(u32x4));
1770 /* Make sure the vector is big enough even if key is all 0's */
1771 vec_validate_aligned
1772 (match, ((t->match_n_vectors + t->skip_n_vectors) * sizeof(u32x4)) - 1,
1775 /* Set size, include skipped vectors*/
1776 _vec_len (match) = (t->match_n_vectors+t->skip_n_vectors) * sizeof(u32x4);
/* Add or delete a classifier session (entry) in the given table.
 *
 * cm           - classifier main
 * table_index  - index into cm->tables (validated below)
 * match        - full match vector, INCLUDING the skip_n_vectors prefix
 * is_add       - 1 to add, 0 to delete
 *
 * Returns 0 on success, VNET_API_ERROR_NO_SUCH_TABLE for a bad table
 * index, or an entry-related API error from the underlying add/del.
 * NOTE(review): the full parameter list and some interior lines are
 * missing from this extract. */
1786 int vnet_classify_add_del_session (vnet_classify_main_t * cm,
1794 vnet_classify_table_t * t;
/* Stack-allocate the largest supported entry (5-vector key),
 * 16-byte aligned for the u32x4 key operations below. */
1795 vnet_classify_entry_5_t _max_e __attribute__((aligned (16)));
1796 vnet_classify_entry_t * e;
1799 if (pool_is_free_index (cm->tables, table_index))
1800 return VNET_API_ERROR_NO_SUCH_TABLE;
1802 t = pool_elt_at_index (cm->tables, table_index);
1804 e = (vnet_classify_entry_t *)&_max_e;
1805 e->next_index = hit_next_index;
1806 e->opaque_index = opaque_index;
1807 e->advance = advance;
1812 /* Copy key data, honoring skip_n_vectors */
1813 clib_memcpy (&e->key, match + t->skip_n_vectors * sizeof (u32x4),
1814 t->match_n_vectors * sizeof (u32x4));
1816 /* Clear don't-care bits; likely when dynamically creating sessions */
1817 for (i = 0; i < t->match_n_vectors; i++)
1818 e->key[i] &= t->mask[i];
1820 rv = vnet_classify_add_del (t, e, is_add);
/* NOTE(review): this return is presumably guarded by a dropped
 * `if (rv)` -- as extracted it would fire unconditionally; confirm
 * against the unabridged file. */
1822 return VNET_API_ERROR_NO_SUCH_ENTRY;
/* CLI handler for "classify session": parse hit-next / opaque-index /
 * match / advance / table-index arguments, then add or delete the
 * session via vnet_classify_add_del_session().
 * NOTE(review): declarations of is_add, match, advance, rv, i and
 * several braces/`break`s are missing from this extract. */
1826 static clib_error_t *
1827 classify_session_command_fn (vlib_main_t * vm,
1828 unformat_input_t * input,
1829 vlib_cli_command_t * cmd)
1831 vnet_classify_main_t * cm = &vnet_classify_main;
/* ~0 sentinels: "not specified yet". table-index is mandatory. */
1833 u32 table_index = ~0;
1834 u32 hit_next_index = ~0;
1835 u64 opaque_index = ~0;
1840 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
1842 if (unformat (input, "del"))
/* The hit-next variants differ only in which next-index namespace
 * (ip, l2-input, l2-output, acl, policer) the name is resolved in. */
1844 else if (unformat (input, "hit-next %U", unformat_ip_next_index,
1847 else if (unformat (input, "l2-input-hit-next %U", unformat_l2_input_next_index,
1850 else if (unformat (input, "l2-output-hit-next %U", unformat_l2_output_next_index,
1853 else if (unformat (input, "acl-hit-next %U", unformat_acl_next_index,
1856 else if (unformat (input, "policer-hit-next %U",
1857 unformat_policer_next_index, &hit_next_index))
1859 else if (unformat (input, "opaque-index %lld", &opaque_index))
/* match needs the table's geometry, hence cm + table_index args. */
1861 else if (unformat (input, "match %U", unformat_classify_match,
1862 cm, &match, table_index))
1864 else if (unformat (input, "advance %d", &advance))
1866 else if (unformat (input, "table-index %d", &table_index))
1870 /* Try registered opaque-index unformat fns */
1871 for (i = 0; i < vec_len (cm->unformat_opaque_index_fns); i++)
1873 if (unformat (input, "%U", cm->unformat_opaque_index_fns[i],
1883 if (table_index == ~0)
1884 return clib_error_return (0, "Table index required");
1886 if (is_add && match == 0)
1887 return clib_error_return (0, "Match value required");
1889 rv = vnet_classify_add_del_session (cm, table_index, match,
1891 opaque_index, advance, is_add);
/* Non-zero rv -> surface the API error code to the CLI user. */
1899 return clib_error_return (0, "vnet_classify_add_del_session returned %d",
1907 .path = "classify session",
1909 "classify session [hit-next|l2-hit-next|acl-hit-next <next_index>|"
1910 "policer-hit-next <policer_name>]"
1911 "\n table-index <nn> match [hex] [l2] [l3 ip4] [opaque-index <index>]",
1912 .function = classify_session_command_fn,
/* Registered opaque-index parser: "opaque-sw_if_index <interface>"
 * resolves an interface name and stores its sw_if_index in *opaquep,
 * letting sessions carry an interface as their opaque value.
 * NOTE(review): the return-type line, sw_if_index declaration and
 * returns are missing from this extract. */
1916 unformat_opaque_sw_if_index (unformat_input_t * input, va_list * args)
1918 u64 * opaquep = va_arg (*args, u64 *);
1921 if (unformat (input, "opaque-sw_if_index %U", unformat_vnet_sw_interface,
1922 vnet_get_main(), &sw_if_index))
1924 *opaquep = sw_if_index;
/* Registered ip next-index parser: "node <name>" adds <name> as a next
 * node of BOTH ip4-classify and ip6-classify, asserting that the two
 * graph arcs land on the same next-index so one value serves both.
 * NOTE(review): return-type line, local declarations and returns are
 * missing from this extract. */
1931 unformat_ip_next_node (unformat_input_t * input, va_list * args)
1933 vnet_classify_main_t * cm = &vnet_classify_main;
1934 u32 * next_indexp = va_arg (*args, u32 *);
1938 if (unformat (input, "node %U", unformat_vlib_node,
1939 cm->vlib_main, &node_index))
1941 rv = next_index = vlib_node_add_next
1942 (cm->vlib_main, ip4_classify_node.index, node_index);
1943 next_index = vlib_node_add_next
1944 (cm->vlib_main, ip6_classify_node.index, node_index);
/* ip4 and ip6 classify must agree on the next-index value. */
1945 ASSERT(rv == next_index);
1947 *next_indexp = next_index;
/* Registered acl next-index parser: "node <name>" adds <name> as a
 * next node of BOTH ip4-inacl and ip6-inacl, asserting both arcs get
 * the same next-index (mirrors unformat_ip_next_node).
 * NOTE(review): return-type line, local declarations and returns are
 * missing from this extract. */
1954 unformat_acl_next_node (unformat_input_t * input, va_list * args)
1956 vnet_classify_main_t * cm = &vnet_classify_main;
1957 u32 * next_indexp = va_arg (*args, u32 *);
1961 if (unformat (input, "node %U", unformat_vlib_node,
1962 cm->vlib_main, &node_index))
1964 rv = next_index = vlib_node_add_next
1965 (cm->vlib_main, ip4_inacl_node.index, node_index);
1966 next_index = vlib_node_add_next
1967 (cm->vlib_main, ip6_inacl_node.index, node_index);
/* ip4 and ip6 input-ACL must agree on the next-index value. */
1968 ASSERT(rv == next_index);
1970 *next_indexp = next_index;
/* Registered l2 next-index parser: "input-node <name>" adds <name> as
 * a next node of l2-input-classify and returns the new next-index.
 * NOTE(review): return-type line, node_index/next_index declarations
 * and returns are missing from this extract. */
1977 unformat_l2_input_next_node (unformat_input_t * input, va_list * args)
1979 vnet_classify_main_t * cm = &vnet_classify_main;
1980 u32 * next_indexp = va_arg (*args, u32 *);
1984 if (unformat (input, "input-node %U", unformat_vlib_node,
1985 cm->vlib_main, &node_index))
1987 next_index = vlib_node_add_next
1988 (cm->vlib_main, l2_input_classify_node.index, node_index);
1990 *next_indexp = next_index;
/* Registered l2 next-index parser: "output-node <name>" adds <name> as
 * a next node of l2-output-classify and returns the new next-index.
 * NOTE(review): return-type line, node_index/next_index declarations
 * and returns are missing from this extract. */
1997 unformat_l2_output_next_node (unformat_input_t * input, va_list * args)
1999 vnet_classify_main_t * cm = &vnet_classify_main;
2000 u32 * next_indexp = va_arg (*args, u32 *);
2004 if (unformat (input, "output-node %U", unformat_vlib_node,
2005 cm->vlib_main, &node_index))
2007 next_index = vlib_node_add_next
2008 (cm->vlib_main, l2_output_classify_node.index, node_index);
2010 *next_indexp = next_index;
2016 static clib_error_t *
2017 vnet_classify_init (vlib_main_t * vm)
2019 vnet_classify_main_t * cm = &vnet_classify_main;
2022 cm->vnet_main = vnet_get_main();
2024 vnet_classify_register_unformat_opaque_index_fn
2025 (unformat_opaque_sw_if_index);
2027 vnet_classify_register_unformat_ip_next_index_fn
2028 (unformat_ip_next_node);
2030 vnet_classify_register_unformat_l2_next_index_fn
2031 (unformat_l2_input_next_node);
2033 vnet_classify_register_unformat_l2_next_index_fn
2034 (unformat_l2_input_next_node);
2036 vnet_classify_register_unformat_l2_next_index_fn
2037 (unformat_l2_output_next_node);
2039 vnet_classify_register_unformat_acl_next_index_fn
2040 (unformat_acl_next_node);
2045 VLIB_INIT_FUNCTION (vnet_classify_init);
/* CLI handler for "test classify" (TEST_CODE only): bulk add, and
 * optionally delete, a run of sessions keyed on consecutive ip4 source
 * addresses, to exercise table create / add / find / delete paths.
 * NOTE(review): many interior lines (locals such as sessions/buckets/
 * is_add/i/rv/hash/deleted/src, braces, returns, some arguments) are
 * missing from this extract. */
2050 static clib_error_t *
2051 test_classify_command_fn (vlib_main_t * vm,
2052 unformat_input_t * input,
2053 vlib_cli_command_t * cmd)
2058 vnet_classify_table_t * t = 0;
2059 classify_data_or_mask_t * mask;
2060 classify_data_or_mask_t * data;
2061 u8 *mp = 0, *dp = 0;
2062 vnet_classify_main_t * cm = &vnet_classify_main;
2063 vnet_classify_entry_t * e;
2066 u32 table_index = ~0;
2069 u32 memory_size = 64<<20;
2071 /* Default starting address 1.0.0.10 */
2072 src.as_u32 = clib_net_to_host_u32 (0x0100000A);
2074 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
2075 if (unformat (input, "sessions %d", &sessions))
2077 else if (unformat (input, "src %U", unformat_ip4_address, &src))
2079 else if (unformat (input, "buckets %d", &buckets))
2081 else if (unformat (input, "memory-size %uM", &tmp))
2082 memory_size = tmp<<20;
2083 else if (unformat (input, "memory-size %uG", &tmp))
2084 memory_size = tmp<<30;
2085 else if (unformat (input, "del"))
2087 else if (unformat (input, "table %d", &table_index))
/* Build a 3-vector (48-byte) mask/data pair for the test table. */
2093 vec_validate_aligned (mp, 3 * sizeof(u32x4), sizeof(u32x4));
2094 vec_validate_aligned (dp, 3 * sizeof(u32x4), sizeof(u32x4));
2096 mask = (classify_data_or_mask_t *) mp;
2097 data = (classify_data_or_mask_t *) dp;
2099 data->ip.src_address.as_u32 = src.as_u32;
2101 /* Mask on src address */
2102 memset (&mask->ip.src_address, 0xff, 4);
/* Bucket count must be a power of two for the classifier hash. */
2104 buckets = 1<<max_log2(buckets);
2106 if (table_index != ~0)
2108 if (pool_is_free_index (cm->tables, table_index))
2110 vlib_cli_output (vm, "No such table %d", table_index);
2113 t = pool_elt_at_index (cm->tables, table_index);
/* No table given: create a fresh one matching 3 vectors on src addr. */
2120 t = vnet_classify_new_table (cm, (u8 *)mask, buckets,
2123 3 /* vectors to match */);
2124 t->miss_next_index = IP_LOOKUP_NEXT_LOCAL;
2125 vlib_cli_output (vm, "Create table %d", t - cm->tables);
2128 vlib_cli_output (vm, "Add %d sessions to %d buckets...",
/* Add phase: one session per consecutive source address. */
2131 for (i = 0; i < sessions; i++)
2133 rv = vnet_classify_add_del_session (cm, t - cm->tables, (u8 *) data,
2134 IP_LOOKUP_NEXT_DROP,
2135 i+100 /* opaque_index */,
2140 clib_warning ("add: returned %d", rv);
/* Advance the key: ++src in host order, store back in net order. */
2142 tmp = clib_net_to_host_u32 (data->ip.src_address.as_u32) + 1;
2143 data->ip.src_address.as_u32 = clib_net_to_host_u32 (tmp);
2150 vlib_cli_output (vm, "Must specify table index to delete sessions");
2154 vlib_cli_output (vm, "Try to delete %d sessions...", sessions);
/* Delete phase: look each entry up, then delete it via its own key. */
2156 for (i = 0; i < sessions; i++)
2158 u8 * key_minus_skip;
2161 hash = vnet_classify_hash_packet (t, (u8 *) data);
2163 e = vnet_classify_find_entry (t, (u8 *) data, hash, 0 /* time_now */);
2164 /* Previous delete, perhaps... */
2167 ASSERT (e->opaque_index == (i+100));
/* add_del_session expects the key INCLUDING the skip prefix, so back
 * up from the stored key by skip_n_vectors. */
2169 key_minus_skip = (u8 *)e->key;
2170 key_minus_skip -= t->skip_n_vectors * sizeof (u32x4);
2172 rv = vnet_classify_add_del_session (cm, t - cm->tables, key_minus_skip,
2173 IP_LOOKUP_NEXT_DROP,
2174 i+100 /* opaque_index */,
2178 clib_warning ("del: returned %d", rv);
2180 tmp = clib_net_to_host_u32 (data->ip.src_address.as_u32) + 1;
2181 data->ip.src_address.as_u32 = clib_net_to_host_u32 (tmp);
2185 vlib_cli_output (vm, "Deleted %d sessions...", deleted);
/* CLI registration for "test classify" (compiled under TEST_CODE).
 * NOTE(review): the `.short_help =` member designator line appears to
 * be missing ahead of the help string in this extract. */
2194 VLIB_CLI_COMMAND (test_classify_command, static) = {
2195 .path = "test classify",
2197 "test classify [src <ip>] [sessions <nn>] [buckets <nn>] [table <nn>] [del]",
2198 .function = test_classify_command_fn,
2200 #endif /* TEST_CODE */