2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
15 #include <vnet/classify/vnet_classify.h>
16 #include <vnet/classify/input_acl.h>
17 #include <vnet/ip/ip.h>
18 #include <vnet/api_errno.h> /* for API error numbers */
19 #include <vnet/l2/l2_classify.h> /* for L2_CLASSIFY_NEXT_xxx */
/* Global classifier state: table pool plus registered unformat hooks. */
21 vnet_classify_main_t vnet_classify_main;
23 #if VALIDATION_SCAFFOLDING
24 /* Validation scaffolding */
/* mv: exercises the table's private mheap by swapping it in and back out.
   NOTE(review): this copy of the file has lines elided; bodies are partial. */
25 void mv (vnet_classify_table_t * t)
29 oldheap = clib_mem_set_heap (t->mheap);
31 clib_mem_set_heap (oldheap);
/* rogue: walk every bucket/page/entry slot, count busy entries, and warn
   if the tally disagrees with the table's cached t->active_elements. */
34 void rogue (vnet_classify_table_t * t)
37 vnet_classify_entry_t * v, * save_v;
38 u32 active_elements = 0;
39 vnet_classify_bucket_t * b;
41 for (i = 0; i < t->nbuckets; i++)
46 save_v = vnet_classify_get_entry (t, b->offset);
47 for (j = 0; j < (1<<b->log2_pages); j++)
49 for (k = 0; k < t->entries_per_page; k++)
51 v = vnet_classify_entry_at_index
52 (t, save_v, j*t->entries_per_page + k);
54 if (vnet_classify_entry_is_busy (v))
60 if (active_elements != t->active_elements)
61 clib_warning ("found %u expected %u elts", active_elements,
/* Non-scaffolding build: no-op stubs keep call sites linkable. */
65 void mv (vnet_classify_table_t * t) { }
66 void rogue (vnet_classify_table_t * t) { }
/* Register an unformat hook tried before the built-in l2-next-index parser. */
69 void vnet_classify_register_unformat_l2_next_index_fn (unformat_function_t * fn)
71 vnet_classify_main_t * cm = &vnet_classify_main;
73 vec_add1 (cm->unformat_l2_next_index_fns, fn);
/* Register an unformat hook tried before the built-in ip-next-index parser. */
76 void vnet_classify_register_unformat_ip_next_index_fn (unformat_function_t * fn)
78 vnet_classify_main_t * cm = &vnet_classify_main;
80 vec_add1 (cm->unformat_ip_next_index_fns, fn);
/* Register an unformat hook tried before the built-in acl-next-index parser.
   NOTE(review): the return-type line appears elided in this copy. */
84 vnet_classify_register_unformat_acl_next_index_fn (unformat_function_t * fn)
86 vnet_classify_main_t * cm = &vnet_classify_main;
88 vec_add1 (cm->unformat_acl_next_index_fns, fn);
/* Register an unformat hook for parsing session opaque-index values. */
91 void vnet_classify_register_unformat_opaque_index_fn (unformat_function_t * fn)
93 vnet_classify_main_t * cm = &vnet_classify_main;
95 vec_add1 (cm->unformat_opaque_index_fns, fn);
/* Allocate and initialize a classify table: rounds nbuckets up to a power
   of two, copies the match mask, and gives the table its own mheap from
   which buckets/entries and the writer lock are carved.
   NOTE(review): several lines (parameter list tail, braces) are elided here. */
98 vnet_classify_table_t *
99 vnet_classify_new_table (vnet_classify_main_t *cm,
100 u8 * mask, u32 nbuckets, u32 memory_size,
104 vnet_classify_table_t * t;
107 nbuckets = 1 << (max_log2 (nbuckets));
109 pool_get_aligned (cm->tables, t, CLIB_CACHE_LINE_BYTES);
110 memset(t, 0, sizeof (*t));
112 vec_validate_aligned (t->mask, match_n_vectors - 1, sizeof(u32x4));
113 memcpy (t->mask, mask, match_n_vectors * sizeof (u32x4));
/* ~0 next_table_index == "no chained table" */
115 t->next_table_index = ~0;
116 t->nbuckets = nbuckets;
117 t->log2_nbuckets = max_log2 (nbuckets);
118 t->match_n_vectors = match_n_vectors;
119 t->skip_n_vectors = skip_n_vectors;
120 t->entries_per_page = 2;
/* Private heap so table teardown is a single mheap_free */
122 t->mheap = mheap_alloc (0 /* use VM */, memory_size);
124 vec_validate_aligned (t->buckets, nbuckets - 1, CLIB_CACHE_LINE_BYTES);
125 oldheap = clib_mem_set_heap (t->mheap);
/* Cache-line-aligned spinlock word, initially unlocked (0) */
127 t->writer_lock = clib_mem_alloc_aligned (CLIB_CACHE_LINE_BYTES,
128 CLIB_CACHE_LINE_BYTES);
129 t->writer_lock[0] = 0;
131 clib_mem_set_heap (oldheap);
/* Delete a table (and, recursively, any chained next-table). Tolerates
   repeated deletion of the same index. Freeing the table's private mheap
   releases all of its entries in one shot. */
135 void vnet_classify_delete_table_index (vnet_classify_main_t *cm,
138 vnet_classify_table_t * t;
140 /* Tolerate multiple frees, up to a point */
141 if (pool_is_free_index (cm->tables, table_index))
144 t = pool_elt_at_index (cm->tables, table_index);
/* Recurse down the chain before freeing this table */
145 if (t->next_table_index != ~0)
146 vnet_classify_delete_table_index (cm, t->next_table_index);
149 vec_free (t->buckets);
150 mheap_free (t->mheap);
152 pool_put (cm->tables, t);
/* Allocate a page-group of entries (2^log2_pages pages) for a table,
   recycling from the per-size freelist when possible, otherwise vec-
   allocating on the table's private mheap. The foreach_size_in_u32x4
   macro expands per match_n_vectors size so the vector allocator sees
   the true element size. Returned entries are poisoned to 0xff (free).
   Caller must hold the writer lock. */
155 static vnet_classify_entry_t *
156 vnet_classify_entry_alloc (vnet_classify_table_t * t, u32 log2_pages)
158 vnet_classify_entry_t * rv = 0;
160 vnet_classify_entry_##size##_t * rv##size = 0;
161 foreach_size_in_u32x4;
166 ASSERT (t->writer_lock[0]);
/* Freelist miss: grow the freelist vector and allocate fresh storage */
167 if (log2_pages >= vec_len (t->freelists) || t->freelists [log2_pages] == 0)
169 oldheap = clib_mem_set_heap (t->mheap);
171 vec_validate (t->freelists, log2_pages);
173 switch(t->match_n_vectors)
175 /* Euchre the vector allocator into allocating the right sizes */
178 vec_validate_aligned \
179 (rv##size, ((1<<log2_pages)*t->entries_per_page) - 1, \
180 CLIB_CACHE_LINE_BYTES); \
181 rv = (vnet_classify_entry_t *) rv##size; \
183 foreach_size_in_u32x4;
190 clib_mem_set_heap (oldheap);
/* Freelist hit: pop the head */
193 rv = t->freelists[log2_pages];
194 t->freelists[log2_pages] = rv->next_free;
198 ASSERT (vec_len(rv) == (1<<log2_pages)*t->entries_per_page);
/* 0xff fill marks every slot free (VNET_CLASSIFY_ENTRY_FREE set) */
200 switch (t->match_n_vectors)
205 memset (rv, 0xff, sizeof (*rv##size) * vec_len(rv)); \
207 foreach_size_in_u32x4;
/* Return a page-group to the freelist for its size class. The size class
   is recovered from the vector length. Caller must hold the writer lock. */
218 vnet_classify_entry_free (vnet_classify_table_t * t,
219 vnet_classify_entry_t * v)
223 ASSERT (t->writer_lock[0]);
225 free_list_index = min_log2(vec_len(v)/t->entries_per_page);
227 ASSERT(vec_len (t->freelists) > free_list_index);
/* Push onto the singly-linked freelist */
229 v->next_free = t->freelists[free_list_index];
230 t->freelists[free_list_index] = v;
/* Snapshot bucket b into this CPU's working copy so the writer can mutate
   entries while readers keep using the (saved) original pages. Saves the
   old bucket in t->saved_bucket, copies the entry pages into the per-cpu
   working-copy vector (growing it on the table's mheap if needed), then
   atomically points the live bucket at the working copy via a barriered
   64-bit bucket-word store. */
233 static inline void make_working_copy
234 (vnet_classify_table_t * t, vnet_classify_bucket_t * b)
236 vnet_classify_entry_t * v;
237 vnet_classify_bucket_t working_bucket __attribute__((aligned (8)));
239 vnet_classify_entry_t * working_copy;
241 vnet_classify_entry_##size##_t * working_copy##size = 0;
242 foreach_size_in_u32x4;
244 u32 cpu_number = os_get_cpu_number();
/* First use on this cpu: grow the per-cpu working-copy vector */
246 if (cpu_number >= vec_len (t->working_copies))
248 oldheap = clib_mem_set_heap (t->mheap);
249 vec_validate (t->working_copies, cpu_number);
250 clib_mem_set_heap (oldheap);
254 * working_copies are per-cpu so that near-simultaneous
255 * updates from multiple threads will not result in sporadic, spurious
258 working_copy = t->working_copies[cpu_number];
260 t->saved_bucket.as_u64 = b->as_u64;
261 oldheap = clib_mem_set_heap (t->mheap);
/* Grow the working copy if this bucket has more entries than it holds */
263 if ((1<<b->log2_pages)*t->entries_per_page > vec_len (working_copy))
265 switch(t->match_n_vectors)
267 /* Euchre the vector allocator into allocating the right sizes */
270 working_copy##size = (void *) working_copy; \
271 vec_validate_aligned \
272 (working_copy##size, \
273 ((1<<b->log2_pages)*t->entries_per_page) - 1, \
274 CLIB_CACHE_LINE_BYTES); \
275 working_copy = (void *) working_copy##size; \
277 foreach_size_in_u32x4;
283 t->working_copies[cpu_number] = working_copy;
286 _vec_len(working_copy) = (1<<b->log2_pages)*t->entries_per_page;
287 clib_mem_set_heap (oldheap);
/* Copy the live pages into the working copy, sized per match_n_vectors */
289 v = vnet_classify_get_entry (t, b->offset);
291 switch(t->match_n_vectors)
295 memcpy (working_copy, v, \
296 sizeof (vnet_classify_entry_##size##_t) \
297 * (1<<b->log2_pages) \
298 * (t->entries_per_page)); \
300 foreach_size_in_u32x4 ;
/* Atomically swing the bucket to the working copy (barrier first) */
307 working_bucket.as_u64 = b->as_u64;
308 working_bucket.offset = vnet_classify_get_offset (t, working_copy);
309 CLIB_MEMORY_BARRIER();
310 b->as_u64 = working_bucket.as_u64;
311 t->working_copies[cpu_number] = working_copy;
/* Double a bucket: allocate a 2^new_log2_pages page-group and rehash every
   busy entry from old_values into it. On a collision that cannot be placed
   (all entries_per_page slots in the target page taken) the new pages are
   freed and the caller is told to retry with more pages.
   NOTE(review): return statements are elided in this copy. */
314 static vnet_classify_entry_t *
315 split_and_rehash (vnet_classify_table_t * t,
316 vnet_classify_entry_t * old_values,
319 vnet_classify_entry_t * new_values, * v, * new_v;
322 new_values = vnet_classify_entry_alloc (t, new_log2_pages);
324 for (i = 0; i < (vec_len (old_values)/t->entries_per_page); i++)
328 for (j = 0; j < t->entries_per_page; j++)
330 v = vnet_classify_entry_at_index
331 (t, old_values, i * t->entries_per_page + j);
333 if (vnet_classify_entry_is_busy (v))
335 /* Hack so we can use the packet hash routine */
337 key_minus_skip = (u8 *) v->key;
338 key_minus_skip -= t->skip_n_vectors * sizeof (u32x4);
/* Page index within the (larger) new group comes from the upper hash bits */
340 new_hash = vnet_classify_hash_packet (t, key_minus_skip);
341 new_hash >>= t->log2_nbuckets;
342 new_hash &= (1<<new_log2_pages) - 1;
344 for (k = 0; k < t->entries_per_page; k++)
346 new_v = vnet_classify_entry_at_index (t, new_values,
349 if (vnet_classify_entry_is_free (new_v))
351 memcpy (new_v, v, sizeof (vnet_classify_entry_t)
352 + (t->match_n_vectors * sizeof (u32x4)));
353 new_v->flags &= ~(VNET_CLASSIFY_ENTRY_FREE);
357 /* Crap. Tell caller to try again */
358 vnet_classify_entry_free (t, new_values);
/* Add or delete one classifier entry under the table writer spinlock.
   Flow: hash the key (minus skip vectors) -> pick bucket -> fast paths
   (empty bucket; replace-in-place; free-slot insert; delete-in-place),
   else split/rehash the bucket into twice as many pages and retry.
   Readers are never blocked: mutations happen on a working copy and the
   bucket word is swapped atomically after a memory barrier.
   NOTE(review): many lines (declarations, braces, retry loop) are elided. */
368 int vnet_classify_add_del (vnet_classify_table_t * t,
369 vnet_classify_entry_t * add_v,
373 vnet_classify_bucket_t * b, tmp_b;
374 vnet_classify_entry_t * v, * new_v, * save_new_v, * working_copy, * save_v;
380 u32 cpu_number = os_get_cpu_number();
383 ASSERT ((add_v->flags & VNET_CLASSIFY_ENTRY_FREE) == 0);
/* Hash covers the skip vectors too, so back the key pointer up */
385 key_minus_skip = (u8 *) add_v->key;
386 key_minus_skip -= t->skip_n_vectors * sizeof (u32x4);
388 hash = vnet_classify_hash_packet (t, key_minus_skip);
390 bucket_index = hash & (t->nbuckets-1);
391 b = &t->buckets[bucket_index];
/* Upper bits select the page within the bucket */
393 hash >>= t->log2_nbuckets;
/* Spin on the writer lock */
395 while (__sync_lock_test_and_set (t->writer_lock, 1))
398 /* First elt in the bucket? */
407 v = vnet_classify_entry_alloc (t, 0 /* new_log2_pages */);
408 memcpy (v, add_v, sizeof (vnet_classify_entry_t) +
409 t->match_n_vectors * sizeof (u32x4));
410 v->flags &= ~(VNET_CLASSIFY_ENTRY_FREE);
413 tmp_b.offset = vnet_classify_get_offset (t, v);
415 b->as_u64 = tmp_b.as_u64;
416 t->active_elements ++;
/* Non-empty bucket: mutate a per-cpu working copy */
421 make_working_copy (t, b);
423 save_v = vnet_classify_get_entry (t, t->saved_bucket.offset);
424 value_index = hash & ((1<<t->saved_bucket.log2_pages)-1);
429 * For obvious (in hindsight) reasons, see if we're supposed to
430 * replace an existing key, then look for an empty slot.
433 for (i = 0; i < t->entries_per_page; i++)
435 v = vnet_classify_entry_at_index (t, save_v, value_index + i);
437 if (!memcmp (v->key, add_v->key, t->match_n_vectors * sizeof (u32x4)))
439 memcpy (v, add_v, sizeof (vnet_classify_entry_t) +
440 t->match_n_vectors * sizeof(u32x4));
441 v->flags &= ~(VNET_CLASSIFY_ENTRY_FREE);
443 CLIB_MEMORY_BARRIER();
444 /* Restore the previous (k,v) pairs */
445 b->as_u64 = t->saved_bucket.as_u64;
/* Second pass: first free slot wins */
449 for (i = 0; i < t->entries_per_page; i++)
451 v = vnet_classify_entry_at_index (t, save_v, value_index + i);
453 if (vnet_classify_entry_is_free (v))
455 memcpy (v, add_v, sizeof (vnet_classify_entry_t) +
456 t->match_n_vectors * sizeof(u32x4));
457 v->flags &= ~(VNET_CLASSIFY_ENTRY_FREE);
458 CLIB_MEMORY_BARRIER();
459 b->as_u64 = t->saved_bucket.as_u64;
460 t->active_elements ++;
464 /* no room at the inn... split case... */
/* Delete path: 0xff-fill the matching entry and mark it free */
468 for (i = 0; i < t->entries_per_page; i++)
470 v = vnet_classify_entry_at_index (t, save_v, value_index + i);
472 if (!memcmp (v->key, add_v->key, t->match_n_vectors * sizeof (u32x4)))
474 memset (v, 0xff, sizeof (vnet_classify_entry_t) +
475 t->match_n_vectors * sizeof(u32x4));
476 v->flags |= VNET_CLASSIFY_ENTRY_FREE;
477 CLIB_MEMORY_BARRIER();
478 b->as_u64 = t->saved_bucket.as_u64;
479 t->active_elements --;
484 b->as_u64 = t->saved_bucket.as_u64;
/* Split: double the pages and rehash, retrying on collision */
488 new_log2_pages = t->saved_bucket.log2_pages + 1;
491 working_copy = t->working_copies[cpu_number];
492 new_v = split_and_rehash (t, working_copy, new_log2_pages);
500 /* Try to add the new entry */
503 key_minus_skip = (u8 *) add_v->key;
504 key_minus_skip -= t->skip_n_vectors * sizeof (u32x4);
506 new_hash = vnet_classify_hash_packet_inline (t, key_minus_skip);
507 new_hash >>= t->log2_nbuckets;
508 new_hash &= (1<<min_log2((vec_len(new_v)/t->entries_per_page))) - 1;
510 for (i = 0; i < t->entries_per_page; i++)
512 new_v = vnet_classify_entry_at_index (t, save_new_v, new_hash + i);
514 if (vnet_classify_entry_is_free (new_v))
516 memcpy (new_v, add_v, sizeof (vnet_classify_entry_t) +
517 t->match_n_vectors * sizeof(u32x4));
518 new_v->flags &= ~(VNET_CLASSIFY_ENTRY_FREE);
522 /* Crap. Try again */
524 vnet_classify_entry_free (t, save_new_v);
/* Publish the new (bigger) bucket, then free the old pages */
528 tmp_b.log2_pages = min_log2 (vec_len (save_new_v)/t->entries_per_page);
529 tmp_b.offset = vnet_classify_get_offset (t, save_new_v);
530 CLIB_MEMORY_BARRIER();
531 b->as_u64 = tmp_b.as_u64;
532 t->active_elements ++;
533 v = vnet_classify_get_entry (t, t->saved_bucket.offset);
534 vnet_classify_entry_free (t, v);
/* Release the writer lock (barrier first so readers see the final state) */
537 CLIB_MEMORY_BARRIER();
538 t->writer_lock[0] = 0;
/* Packed overlay: an ethernet header followed by (elided here) L3 data,
   used to view classifier data/mask buffers. */
543 typedef CLIB_PACKED(struct {
544 ethernet_header_t eh;
546 }) classify_data_or_mask_t;
/* Out-of-line wrapper so non-inline callers can hash a packet key. */
548 u64 vnet_classify_hash_packet (vnet_classify_table_t * t, u8 * h)
550 return vnet_classify_hash_packet_inline (t, h);
/* Out-of-line wrapper around the inline lookup (h = packet data,
   hash = precomputed key hash, now = timestamp for hit accounting). */
553 vnet_classify_entry_t *
554 vnet_classify_find_entry (vnet_classify_table_t * t,
555 u8 * h, u64 hash, f64 now)
557 return vnet_classify_find_entry_inline (t, h, hash, now);
/* format() helper: print one entry's heap offset, next_index, advance,
   opaque, key hex, and (if busy) hit/last-heard stats. */
560 static u8 * format_classify_entry (u8 * s, va_list * args)
562 vnet_classify_table_t * t = va_arg (*args, vnet_classify_table_t *);
563 vnet_classify_entry_t * e = va_arg (*args, vnet_classify_entry_t *);
566 (s, "[%u]: next_index %d advance %d opaque %d\n",
567 vnet_classify_get_offset (t, e), e->next_index, e->advance,
571 s = format (s, " k: %U\n", format_hex_bytes, e->key,
572 t->match_n_vectors * sizeof(u32x4));
574 if (vnet_classify_entry_is_busy (e))
575 s = format (s, " hits %lld, last_heard %.2f\n",
576 e->hits, e->last_heard);
578 s = format (s, " entry is free\n");
/* format() helper: dump every bucket of a table; verbose mode walks each
   page/slot and prints entries via format_classify_entry, then totals. */
582 u8 * format_classify_table (u8 * s, va_list * args)
584 vnet_classify_table_t * t = va_arg (*args, vnet_classify_table_t *);
585 int verbose = va_arg (*args, int);
586 vnet_classify_bucket_t * b;
587 vnet_classify_entry_t * v, * save_v;
589 u64 active_elements = 0;
591 for (i = 0; i < t->nbuckets; i++)
597 s = format (s, "[%d]: empty\n", i);
603 s = format (s, "[%d]: heap offset %d, len %d\n", i,
604 b->offset, (1<<b->log2_pages));
607 save_v = vnet_classify_get_entry (t, b->offset);
608 for (j = 0; j < (1<<b->log2_pages); j++)
610 for (k = 0; k < t->entries_per_page; k++)
613 v = vnet_classify_entry_at_index (t, save_v,
614 j*t->entries_per_page + k);
616 if (vnet_classify_entry_is_free (v))
619 s = format (s, " %d: empty\n",
620 j * t->entries_per_page + k);
625 s = format (s, " %d: %U\n",
626 j * t->entries_per_page + k,
627 format_classify_entry, t, v);
634 s = format (s, " %lld active elements\n", active_elements);
635 s = format (s, " %d free lists\n", vec_len (t->freelists));
/* API-level add/delete of a whole table. On add: validates memory_size,
   creates the table, wires next/miss indices, returns its pool index via
   *table_index. On delete: tears the table (chain) down.
   NOTE(review): several parameter and branch lines are elided here. */
639 int vnet_classify_add_del_table (vnet_classify_main_t * cm,
645 u32 next_table_index,
650 vnet_classify_table_t * t;
655 if (memory_size == 0)
656 return VNET_API_ERROR_INVALID_MEMORY_SIZE;
659 return VNET_API_ERROR_INVALID_VALUE;
661 t = vnet_classify_new_table (cm, mask, nbuckets, memory_size,
663 t->next_table_index = next_table_index;
664 t->miss_next_index = miss_next_index;
665 *table_index = t - cm->tables;
/* delete path */
669 vnet_classify_delete_table_index (cm, *table_index);
/* Field list for ip4 mask parsing (continuation lines elided in this copy). */
673 #define foreach_ip4_proto_field \
/* Parse an ip4 mask spec ("src", "dst", "proto", ...) into an all-ones-per-
   selected-field ip4_header_t mask vector returned via *maskp. version /
   hdr_length set only their nibble of the first byte. */
683 uword unformat_ip4_mask (unformat_input_t * input, va_list * args)
685 u8 ** maskp = va_arg (*args, u8 **);
687 u8 found_something = 0;
691 foreach_ip4_proto_field;
697 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
699 if (unformat (input, "version"))
701 else if (unformat (input, "hdr_length"))
703 else if (unformat (input, "src"))
705 else if (unformat (input, "dst"))
707 else if (unformat (input, "proto"))
710 #define _(a) else if (unformat (input, #a)) a=1;
711 foreach_ip4_proto_field
/* Fail if no field keyword was recognized */
717 #define _(a) found_something += a;
718 foreach_ip4_proto_field;
721 if (found_something == 0)
724 vec_validate (mask, sizeof (*ip) - 1);
726 ip = (ip4_header_t *) mask;
728 #define _(a) if (a) memset (&ip->a, 0xff, sizeof (ip->a));
729 foreach_ip4_proto_field;
/* version/hdr_length share one byte: set only the requested nibble */
732 ip->ip_version_and_header_length = 0;
735 ip->ip_version_and_header_length |= 0xF0;
738 ip->ip_version_and_header_length |= 0x0F;
/* Field list for ip6 mask parsing (continuation lines elided in this copy). */
744 #define foreach_ip6_proto_field \
/* Parse an ip6 mask spec into an ip6_header_t mask via *maskp. version,
   traffic-class and flow-label share the first 32-bit word; their mask
   bits are assembled host-order then byte-swapped to network order. */
751 uword unformat_ip6_mask (unformat_input_t * input, va_list * args)
753 u8 ** maskp = va_arg (*args, u8 **);
755 u8 found_something = 0;
757 u32 ip_version_traffic_class_and_flow_label;
760 foreach_ip6_proto_field;
763 u8 traffic_class = 0;
766 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
768 if (unformat (input, "version"))
770 else if (unformat (input, "traffic-class"))
772 else if (unformat (input, "flow-label"))
774 else if (unformat (input, "src"))
776 else if (unformat (input, "dst"))
778 else if (unformat (input, "proto"))
781 #define _(a) else if (unformat (input, #a)) a=1;
782 foreach_ip6_proto_field
788 #define _(a) found_something += a;
789 foreach_ip6_proto_field;
792 if (found_something == 0)
795 vec_validate (mask, sizeof (*ip) - 1);
797 ip = (ip6_header_t *) mask;
799 #define _(a) if (a) memset (&ip->a, 0xff, sizeof (ip->a));
800 foreach_ip6_proto_field;
/* 4-bit version | 8-bit TC | 20-bit flow label, assembled host-order */
803 ip_version_traffic_class_and_flow_label = 0;
806 ip_version_traffic_class_and_flow_label |= 0xF0000000;
809 ip_version_traffic_class_and_flow_label |= 0x0FF00000;
812 ip_version_traffic_class_and_flow_label |= 0x000FFFFF;
814 ip->ip_version_traffic_class_and_flow_label =
815 clib_host_to_net_u32 (ip_version_traffic_class_and_flow_label);
/* Dispatch "ip4 ..." / "ip6 ..." mask sub-parsers into *maskp. */
821 uword unformat_l3_mask (unformat_input_t * input, va_list * args)
823 u8 ** maskp = va_arg (*args, u8 **);
825 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
826 if (unformat (input, "ip4 %U", unformat_ip4_mask, maskp))
828 else if (unformat (input, "ip6 %U", unformat_ip6_mask, maskp))
/* Parse an L2 (ethernet) mask spec into *maskp. Byte offsets depend on
   how many VLAN tags are implied: tag1/cos1/dot1q add 4 bytes, tag2/cos2/
   dot1ad add 4 more, which shifts the ethertype mask position (12/16/20).
   NOTE(review): flag declarations and length logic are elided in this copy. */
836 uword unformat_l2_mask (unformat_input_t * input, va_list * args)
838 u8 ** maskp = va_arg (*args, u8 **);
853 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
854 if (unformat (input, "src"))
856 else if (unformat (input, "dst"))
858 else if (unformat (input, "proto"))
860 else if (unformat (input, "tag1"))
862 else if (unformat (input, "tag2"))
864 else if (unformat (input, "ignore-tag1"))
866 else if (unformat (input, "ignore-tag2"))
868 else if (unformat (input, "cos1"))
870 else if (unformat (input, "cos2"))
872 else if (unformat (input, "dot1q"))
874 else if (unformat (input, "dot1ad"))
879 if ((src + dst + proto + tag1 + tag2 + dot1q + dot1ad +
880 ignore_tag1 + ignore_tag2 + cos1 + cos2) == 0)
883 if (tag1 || ignore_tag1 || cos1 || dot1q)
885 if (tag2 || ignore_tag2 || cos2 || dot1ad)
888 vec_validate (mask, len-1)
891 memset (mask, 0xff, 6);
894 memset (mask + 6, 0xff, 6);
/* ethertype offset: 20 with two tags, 16 with one, 12 untagged */
907 mask[21] = mask [20] = 0xff;
928 mask[16] = mask [17] = 0xff;
937 mask[12] = mask [13] = 0xff;
/* Top-level mask parser: accepts hex / l2 / l3 sub-masks, prepends an
   ethernet header's worth of zeros before an l3 mask, then computes the
   (skip, match) u32x4-vector parameters: skip = leading all-zero vectors
   (deleted from the mask), match = trailing significant vectors. */
943 uword unformat_classify_mask (unformat_input_t * input, va_list * args)
945 vnet_classify_main_t * CLIB_UNUSED(cm)
946 = va_arg (*args, vnet_classify_main_t *);
947 u8 ** maskp = va_arg (*args, u8 **);
948 u32 * skipp = va_arg (*args, u32 *);
949 u32 * matchp = va_arg (*args, u32 *);
956 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
957 if (unformat (input, "hex %U", unformat_hex_string, &mask))
959 else if (unformat (input, "l2 %U", unformat_l2_mask, &l2))
961 else if (unformat (input, "l3 %U", unformat_l3_mask, &l3))
967 if (mask || l2 || l3)
971 /* "With a free Ethernet header in every package" */
973 vec_validate (l2, 13);
975 vec_append (mask, l3);
979 /* Scan forward looking for the first significant mask octet */
980 for (i = 0; i < vec_len (mask); i++)
984 /* compute (skip, match) params */
985 *skipp = i / sizeof(u32x4);
986 vec_delete (mask, *skipp * sizeof(u32x4), 0);
988 /* Pad mask to an even multiple of the vector size */
989 while (vec_len (mask) % sizeof (u32x4))
992 match = vec_len (mask) / sizeof (u32x4);
/* Trim trailing all-zero vectors from the match count */
994 for (i = match*sizeof(u32x4); i > 0; i-= sizeof(u32x4))
996 u64 *tmp = (u64 *)(mask + (i-sizeof(u32x4)));
997 if (*tmp || *(tmp+1))
1002 clib_warning ("BUG: match 0");
1004 _vec_len (mask) = match * sizeof(u32x4);
/* Name -> L2_CLASSIFY_NEXT_xxx table (continuation lines elided). */
1015 #define foreach_l2_next \
1017 _(ethernet, ETHERNET_INPUT) \
/* Parse an l2 next-index: registered hooks first, then the name table,
   then a raw decimal number. Result via *miss_next_indexp. */
1022 uword unformat_l2_next_index (unformat_input_t * input, va_list * args)
1024 vnet_classify_main_t * cm = &vnet_classify_main;
1025 u32 * miss_next_indexp = va_arg (*args, u32 *);
1030 /* First try registered unformat fns, allowing override... */
1031 for (i = 0; i < vec_len (cm->unformat_l2_next_index_fns); i++)
1033 if (unformat (input, "%U", cm->unformat_l2_next_index_fns[i], &tmp))
1041 if (unformat (input, #n)) { next_index = L2_CLASSIFY_NEXT_##N; goto out;}
1045 if (unformat (input, "%d", &tmp))
1054 *miss_next_indexp = next_index;
/* Name -> IP_LOOKUP_NEXT_xxx table (continuation lines elided). */
1058 #define foreach_ip_next \
/* Parse an ip next-index: registered hooks first, then the name table,
   then a raw decimal number. Result via *miss_next_indexp. */
1064 uword unformat_ip_next_index (unformat_input_t * input, va_list * args)
1066 u32 * miss_next_indexp = va_arg (*args, u32 *);
1067 vnet_classify_main_t * cm = &vnet_classify_main;
1072 /* First try registered unformat fns, allowing override... */
1073 for (i = 0; i < vec_len (cm->unformat_ip_next_index_fns); i++)
1075 if (unformat (input, "%U", cm->unformat_ip_next_index_fns[i], &tmp))
1083 if (unformat (input, #n)) { next_index = IP_LOOKUP_NEXT_##N; goto out;}
1087 if (unformat (input, "%d", &tmp))
1096 *miss_next_indexp = next_index;
/* Name -> ACL_NEXT_INDEX_xxx table (continuation lines elided). */
1100 #define foreach_acl_next \
/* Parse an acl next-index: registered hooks, name table, "permit", or a
   raw decimal number. Result via *next_indexp. */
1103 uword unformat_acl_next_index (unformat_input_t * input, va_list * args)
1105 u32 * next_indexp = va_arg (*args, u32 *);
1106 vnet_classify_main_t * cm = &vnet_classify_main;
1111 /* First try registered unformat fns, allowing override... */
1112 for (i = 0; i < vec_len (cm->unformat_acl_next_index_fns); i++)
1114 if (unformat (input, "%U", cm->unformat_acl_next_index_fns[i], &tmp))
1122 if (unformat (input, #n)) { next_index = ACL_NEXT_INDEX_##N; goto out;}
1126 if (unformat (input, "permit"))
1131 else if (unformat (input, "%d", &tmp))
1140 *next_indexp = next_index;
/* CLI handler for "classify table": parses table parameters (mask, skip,
   match, buckets, memory size, chained/miss next indices), validates the
   required ones, and calls vnet_classify_add_del_table. */
1144 static clib_error_t *
1145 classify_table_command_fn (vlib_main_t * vm,
1146 unformat_input_t * input,
1147 vlib_cli_command_t * cmd)
1153 u32 table_index = ~0;
1154 u32 next_table_index = ~0;
1155 u32 miss_next_index = ~0;
/* Default table heap: 2 MB */
1156 u32 memory_size = 2<<20;
1160 vnet_classify_main_t * cm = &vnet_classify_main;
1163 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
1164 if (unformat (input, "del"))
1166 else if (unformat (input, "buckets %d", &nbuckets))
1168 else if (unformat (input, "skip %d", &skip))
1170 else if (unformat (input, "match %d", &match))
1172 else if (unformat (input, "table %d", &table_index))
1174 else if (unformat (input, "mask %U", unformat_classify_mask,
1175 cm, &mask, &skip, &match))
1177 else if (unformat (input, "memory-size %uM", &tmp))
1178 memory_size = tmp<<20;
1179 else if (unformat (input, "memory-size %uG", &tmp))
1180 memory_size = tmp<<30;
1181 else if (unformat (input, "next-table %d", &next_table_index))
1183 else if (unformat (input, "miss-next %U", unformat_ip_next_index,
1186 else if (unformat (input, "l2-miss-next %U", unformat_l2_next_index,
1189 else if (unformat (input, "acl-miss-next %U", unformat_acl_next_index,
/* Mandatory-argument checks */
1197 if (is_add && mask == 0)
1198 return clib_error_return (0, "Mask required");
1200 if (is_add && skip == ~0)
1201 return clib_error_return (0, "skip count required");
1203 if (is_add && match == ~0)
1204 return clib_error_return (0, "match count required");
1206 if (!is_add && table_index == ~0)
1207 return clib_error_return (0, "table index required for delete");
1209 rv = vnet_classify_add_del_table (cm, mask, nbuckets, memory_size,
1210 skip, match, next_table_index, miss_next_index,
1211 &table_index, is_add);
1218 return clib_error_return (0, "vnet_classify_add_del_table returned %d",
/* CLI registration for "classify table". */
1224 VLIB_CLI_COMMAND (classify_table, static) = {
1225 .path = "classify table",
/* Fixed help text: "l2-miss_next" -> "l2-miss-next" to match the
   actual parser token used by classify_table_command_fn. */
1227 "classify table [miss-next|l2-miss-next|acl-miss-next <next_index>]"
1228 "\n mask <mask-value> buckets <nn> [skip <n>] [match <n>] [del]",
1229 .function = classify_table_command_fn,
/* format() helper: one table per call. index == ~0 prints the column
   header; otherwise prints the summary row and, in verbose mode, the
   heap stats, mask, and full bucket dump. */
1232 static u8 * format_vnet_classify_table (u8 * s, va_list * args)
1234 vnet_classify_main_t * cm = va_arg (*args, vnet_classify_main_t *);
1235 int verbose = va_arg (*args, int);
1236 u32 index = va_arg (*args, u32);
1237 vnet_classify_table_t * t;
1241 s = format (s, "%10s%10s%10s%10s", "TableIdx", "Sessions", "NextTbl",
1242 "NextNode", verbose ? "Details" : "");
1246 t = pool_elt_at_index (cm->tables, index);
1247 s = format (s, "%10u%10d%10d%10d", index, t->active_elements,
1248 t->next_table_index, t->miss_next_index);
1250 s = format (s, "\n Heap: %U", format_mheap, t->mheap, 0 /*verbose*/);
1252 s = format (s, "\n nbuckets %d, skip %d match %d",
1253 t->nbuckets, t->skip_n_vectors, t->match_n_vectors);
1254 s = format (s, "\n mask %U", format_hex_bytes, t->mask,
1255 t->match_n_vectors * sizeof (u32x4));
1260 s = format (s, "\n%U", format_classify_table, t, verbose);
/* CLI handler for "show classify tables": collects matching table indices
   (all, or a single "index <n>") then prints header plus one row each. */
1265 static clib_error_t *
1266 show_classify_tables_command_fn (vlib_main_t * vm,
1267 unformat_input_t * input,
1268 vlib_cli_command_t * cmd)
1270 vnet_classify_main_t * cm = &vnet_classify_main;
1271 vnet_classify_table_t * t;
1272 u32 match_index = ~0;
1277 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
1279 if (unformat (input, "index %d", &match_index))
1281 else if (unformat (input, "verbose %d", &verbose))
1283 else if (unformat (input, "verbose"))
1289 pool_foreach (t, cm->tables,
1291 if (match_index == ~0 || (match_index == t - cm->tables))
1292 vec_add1 (indices, t - cm->tables);
1295 if (vec_len(indices))
/* First call (index ~0 elided here) prints the column header */
1297 vlib_cli_output (vm, "%U", format_vnet_classify_table, cm, verbose,
1299 for (i = 0; i < vec_len (indices); i++)
1300 vlib_cli_output (vm, "%U", format_vnet_classify_table, cm,
1301 verbose, indices[i]);
1304 vlib_cli_output (vm, "No classifier tables configured");
/* CLI registration for "show classify tables". */
1311 VLIB_CLI_COMMAND (show_classify_table_command, static) = {
1312 .path = "show classify tables",
1313 .short_help = "show classify tables [index <nn>]",
1314 .function = show_classify_tables_command_fn,
/* Parse concrete ip4 match VALUES (vs. the mask parser above) into an
   ip4_header_t laid down in a u32x4-aligned vector via *matchp. */
1317 uword unformat_ip4_match (unformat_input_t * input, va_list * args)
1319 u8 ** matchp = va_arg (*args, u8 **);
1326 int src = 0, dst = 0;
1327 ip4_address_t src_val, dst_val;
1334 int fragment_id = 0;
1335 u32 fragment_id_val;
1341 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
1343 if (unformat (input, "version %d", &version_val))
1345 else if (unformat (input, "hdr_length %d", &hdr_length_val))
1347 else if (unformat (input, "src %U", unformat_ip4_address, &src_val))
1349 else if (unformat (input, "dst %U", unformat_ip4_address, &dst_val))
1351 else if (unformat (input, "proto %d", &proto_val))
1353 else if (unformat (input, "tos %d", &tos_val))
1355 else if (unformat (input, "length %d", &length_val))
1357 else if (unformat (input, "fragment_id %d", &fragment_id_val))
1359 else if (unformat (input, "ttl %d", &ttl_val))
1361 else if (unformat (input, "checksum %d", &checksum_val))
1367 if (version + hdr_length + src + dst + proto + tos + length + fragment_id
1368 + ttl + checksum == 0)
1372 * Aligned because we use the real comparison functions
1374 vec_validate_aligned (match, sizeof (*ip) - 1, sizeof(u32x4));
1376 ip = (ip4_header_t *) match;
1378 /* These are realistically matched in practice */
1380 ip->src_address.as_u32 = src_val.as_u32;
1383 ip->dst_address.as_u32 = dst_val.as_u32;
1386 ip->protocol = proto_val;
1389 /* These are not, but they're included for completeness */
1391 ip->ip_version_and_header_length |= (version_val & 0xF)<<4;
1394 ip->ip_version_and_header_length |= (hdr_length_val & 0xF);
1400 ip->length = length_val;
1406 ip->checksum = checksum_val;
/* Parse concrete ip6 match VALUES into an ip6_header_t laid down in a
   u32x4-aligned vector via *matchp. version/TC/flow-label are packed
   into one network-order 32-bit word. */
1412 uword unformat_ip6_match (unformat_input_t * input, va_list * args)
1414 u8 ** matchp = va_arg (*args, u8 **);
1419 u8 traffic_class = 0;
1420 u32 traffic_class_val;
1423 int src = 0, dst = 0;
1424 ip6_address_t src_val, dst_val;
1427 int payload_length = 0;
1428 u32 payload_length_val;
1431 u32 ip_version_traffic_class_and_flow_label;
1433 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
1435 if (unformat (input, "version %d", &version_val))
1437 else if (unformat (input, "traffic_class %d", &traffic_class_val))
1439 else if (unformat (input, "flow_label %d", &flow_label_val))
1441 else if (unformat (input, "src %U", unformat_ip6_address, &src_val))
1443 else if (unformat (input, "dst %U", unformat_ip6_address, &dst_val))
1445 else if (unformat (input, "proto %d", &proto_val))
1447 else if (unformat (input, "payload_length %d", &payload_length_val))
1449 else if (unformat (input, "hop_limit %d", &hop_limit_val))
1455 if (version + traffic_class + flow_label + src + dst + proto +
1456 payload_length + hop_limit == 0)
1460 * Aligned because we use the real comparison functions
1462 vec_validate_aligned (match, sizeof (*ip) - 1, sizeof(u32x4));
1464 ip = (ip6_header_t *) match;
1467 memcpy (&ip->src_address, &src_val, sizeof (ip->src_address));
1470 memcpy (&ip->dst_address, &dst_val, sizeof (ip->dst_address));
1473 ip->protocol = proto_val;
/* 4-bit version | 8-bit TC | 20-bit flow label */
1475 ip_version_traffic_class_and_flow_label = 0;
1478 ip_version_traffic_class_and_flow_label |= (version_val & 0xF) << 28;
1481 ip_version_traffic_class_and_flow_label |= (traffic_class_val & 0xFF) << 20;
1484 ip_version_traffic_class_and_flow_label |= (flow_label_val & 0xFFFFF);
1486 ip->ip_version_traffic_class_and_flow_label =
1487 clib_host_to_net_u32 (ip_version_traffic_class_and_flow_label);
1490 ip->payload_length = clib_host_to_net_u16 (payload_length_val);
1493 ip->hop_limit = hop_limit_val;
/* Dispatch "ip4 ..." / "ip6 ..." match-value sub-parsers into *matchp. */
1499 uword unformat_l3_match (unformat_input_t * input, va_list * args)
1501 u8 ** matchp = va_arg (*args, u8 **);
1503 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
1504 if (unformat (input, "ip4 %U", unformat_ip4_match, matchp))
1506 else if (unformat (input, "ip6 %U", unformat_ip6_match, matchp))
/* Parse a decimal VLAN ID into 2 big-endian bytes at tagp.
   NOTE(review): tagp[0] keeps only the low nibble of the high byte
   (12-bit VLAN ID); PCP/DEI bits are handled separately by the caller. */
1515 uword unformat_vlan_tag (unformat_input_t * input, va_list * args)
1517 u8 * tagp = va_arg (*args, u8 *);
1520 if (unformat(input, "%d", &tag))
1522 tagp[0] = (tag>>8) & 0x0F;
1523 tagp[1] = tag & 0xFF;
/* Parse concrete L2 match VALUES into *matchp. Byte layout mirrors
   unformat_l2_mask: dst[0..5], src[6..11], then optional outer/inner
   VLAN tags which shift the ethertype offset (20/16/12). cos bits are
   OR'd into the high 3 bits of each tag's first byte.
   NOTE(review): flag/value declarations and length logic are elided. */
1530 uword unformat_l2_match (unformat_input_t * input, va_list * args)
1532 u8 ** matchp = va_arg (*args, u8 **);
1552 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
1553 if (unformat (input, "src %U", unformat_ethernet_address, &src_val))
1555 else if (unformat (input, "dst %U", unformat_ethernet_address, &dst_val))
1557 else if (unformat (input, "proto %U",
1558 unformat_ethernet_type_host_byte_order, &proto_val))
1560 else if (unformat (input, "tag1 %U", unformat_vlan_tag, tag1_val))
1562 else if (unformat (input, "tag2 %U", unformat_vlan_tag, tag2_val))
1564 else if (unformat (input, "ignore-tag1"))
1566 else if (unformat (input, "ignore-tag2"))
1568 else if (unformat (input, "cos1 %d", &cos1_val))
1570 else if (unformat (input, "cos2 %d", &cos2_val))
1575 if ((src + dst + proto + tag1 + tag2 +
1576 ignore_tag1 + ignore_tag2 + cos1 + cos2) == 0)
1579 if (tag1 || ignore_tag1 || cos1)
1581 if (tag2 || ignore_tag2 || cos2)
1584 vec_validate_aligned (match, len-1, sizeof(u32x4));
1587 memcpy (match, dst_val, 6);
1590 memcpy (match + 6, src_val, 6);
1594 /* inner vlan tag */
1595 match[19] = tag2_val[1];
1596 match[18] = tag2_val[0];
1598 match [18] |= (cos2_val & 0x7) << 5;
/* double-tagged: ethertype at bytes 20/21 */
1601 match[21] = proto_val & 0xff;
1602 match[20] = proto_val >> 8;
1606 match [15] = tag1_val[1];
1607 match [14] = tag1_val[0];
1610 match [14] |= (cos1_val & 0x7) << 5;
/* single-tagged: ethertype at bytes 16/17 */
1616 match [15] = tag1_val[1];
1617 match [14] = tag1_val[0];
1620 match[17] = proto_val & 0xff;
1621 match[16] = proto_val >> 8;
1624 match [14] |= (cos1_val & 0x7) << 5;
1630 match [18] |= (cos2_val & 0x7) << 5;
1632 match [14] |= (cos1_val & 0x7) << 5;
/* untagged: ethertype at bytes 12/13 */
1635 match[13] = proto_val & 0xff;
1636 match[12] = proto_val >> 8;
/* Top-level match parser: accepts hex / l2 / l3 match values, prepends a
   14-byte ethernet header before an l3 match, then pads/truncates the
   vector to exactly (match + skip) u32x4 vectors for table table_index. */
1644 uword unformat_classify_match (unformat_input_t * input, va_list * args)
1646 vnet_classify_main_t * cm = va_arg (*args, vnet_classify_main_t *);
1647 u8 ** matchp = va_arg (*args, u8 **);
1648 u32 table_index = va_arg (*args, u32);
1649 vnet_classify_table_t * t;
/* Need the table to know its skip/match vector counts */
1655 if (pool_is_free_index (cm->tables, table_index))
1658 t = pool_elt_at_index (cm->tables, table_index);
1660 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
1661 if (unformat (input, "hex %U", unformat_hex_string, &match))
1663 else if (unformat (input, "l2 %U", unformat_l2_match, &l2))
1665 else if (unformat (input, "l3 %U", unformat_l3_match, &l3))
1671 if (match || l2 || l3)
1675 /* "Win a free Ethernet header in every packet" */
1677 vec_validate_aligned (l2, 13, sizeof(u32x4));
1679 vec_append_aligned (match, l3, sizeof(u32x4));
1683 /* Make sure the vector is big enough even if key is all 0's */
1684 vec_validate_aligned
1685 (match, ((t->match_n_vectors + t->skip_n_vectors) * sizeof(u32x4)) - 1,
1688 /* Set size, include skipped vectors*/
1689 _vec_len (match) = (t->match_n_vectors+t->skip_n_vectors) * sizeof(u32x4);
/*
 * vnet_classify_add_del_session
 *
 * Add or delete a session in the classifier table at table_index.
 * Builds a stack-resident entry from the caller's match blob, masks off
 * don't-care bits, and hands it to vnet_classify_add_del.
 *
 * Returns 0 on success, VNET_API_ERROR_NO_SUCH_TABLE for a bad table
 * index, or VNET_API_ERROR_NO_SUCH_ENTRY when the add/del itself fails.
 *
 * NOTE(review): most of the parameter list and parts of the body are
 * elided in this listing (gaps in the embedded line numbers).
 */
1699 int vnet_classify_add_del_session (vnet_classify_main_t * cm,
1707 vnet_classify_table_t * t;
/* Largest supported entry shape (name suggests 5 key quadwords),
   16-byte aligned so the u32x4 key copy below is safe —
   TODO confirm against vnet_classify.h */
1708 vnet_classify_entry_5_t _max_e __attribute__((aligned (16)));
1709 vnet_classify_entry_t * e;
1712 if (pool_is_free_index (cm->tables, table_index))
1713 return VNET_API_ERROR_NO_SUCH_TABLE;
1715 t = pool_elt_at_index (cm->tables, table_index);
1717 e = (vnet_classify_entry_t *)&_max_e;
1718 e->next_index = hit_next_index;
1719 e->opaque_index = opaque_index;
1720 e->advance = advance;
1725 /* Copy key data, honoring skip_n_vectors */
1726 memcpy (&e->key, match + t->skip_n_vectors * sizeof (u32x4),
1727 t->match_n_vectors * sizeof (u32x4));
1729 /* Clear don't-care bits; likely when dynamically creating sessions */
1730 for (i = 0; i < t->match_n_vectors; i++)
1731 e->key[i] &= t->mask[i];
1733 rv = vnet_classify_add_del (t, e, is_add);
1735 return VNET_API_ERROR_NO_SUCH_ENTRY;
/*
 * CLI handler for "classify session".
 *
 * Parses del / hit-next / l2-hit-next / acl-hit-next / opaque-index /
 * match / advance / table-index, tries any registered opaque-index
 * unformat functions for unrecognized tokens, then calls
 * vnet_classify_add_del_session.  table-index is mandatory; a match
 * value is mandatory for adds.
 *
 * NOTE(review): elided listing — loop-body actions and several closing
 * braces are not visible here.
 */
1739 static clib_error_t *
1740 classify_session_command_fn (vlib_main_t * vm,
1741 unformat_input_t * input,
1742 vlib_cli_command_t * cmd)
1744 vnet_classify_main_t * cm = &vnet_classify_main;
/* ~0 sentinels: "not specified" */
1746 u32 table_index = ~0;
1747 u32 hit_next_index = ~0;
1748 u64 opaque_index = ~0;
1753 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
1755 if (unformat (input, "del"))
1757 else if (unformat (input, "hit-next %U", unformat_ip_next_index,
1760 else if (unformat (input, "l2-hit-next %U", unformat_l2_next_index,
1763 else if (unformat (input, "acl-hit-next %U", unformat_acl_next_index,
1766 else if (unformat (input, "opaque-index %lld", &opaque_index))
/* match parsing needs table_index so the blob can be sized to the
   table's skip/match vector counts — parse table-index first on the
   command line */
1768 else if (unformat (input, "match %U", unformat_classify_match,
1769 cm, &match, table_index))
1771 else if (unformat (input, "advance %d", &advance))
1773 else if (unformat (input, "table-index %d", &table_index))
1777 /* Try registered opaque-index unformat fns */
1778 for (i = 0; i < vec_len (cm->unformat_opaque_index_fns); i++)
1780 if (unformat (input, "%U", cm->unformat_opaque_index_fns[i],
1790 if (table_index == ~0)
1791 return clib_error_return (0, "Table index required");
1793 if (is_add && match == 0)
1794 return clib_error_return (0, "Match value required");
1796 rv = vnet_classify_add_del_session (cm, table_index, match,
1798 opaque_index, advance, is_add);
/* Non-zero rv from add/del is surfaced verbatim to the CLI user */
1806 return clib_error_return (0, "vnet_classify_add_del_session returned %d",
/*
 * CLI registration for the "classify session" command handled above.
 * NOTE(review): elided listing; the .short_help field assignment line
 * is not visible, only its string continuation.
 */
1813 VLIB_CLI_COMMAND (classify_session_command, static) = {
1814 .path = "classify session",
1816 "classify session [hit-next|l2-hit-next|acl-hit-next <next_index>]"
1817 "\n table-index <nn> match [hex] [l2] [l3 ip4] [opaque-index <index>]",
1818 .function = classify_session_command_fn,
/*
 * Opaque-index unformat helper: parses "opaque-sw_if_index <intfc>" and
 * stores the resolved sw_if_index into the caller's u64 opaque value.
 * Registered via vnet_classify_register_unformat_opaque_index_fn in
 * vnet_classify_init below.
 * NOTE(review): elided listing — the return-type line and return
 * statements are not visible here.
 */
1822 unformat_opaque_sw_if_index (unformat_input_t * input, va_list * args)
1824 u64 * opaquep = va_arg (*args, u64 *);
1827 if (unformat (input, "opaque-sw_if_index %U", unformat_vnet_sw_interface,
1828 vnet_get_main(), &sw_if_index))
1830 *opaquep = sw_if_index;
/*
 * "node <name>" -> ip classify next index.  Adds the named graph node
 * as a next of BOTH ip4_classify and ip6_classify; the ASSERT verifies
 * the two arcs received the same next index so a single value can be
 * returned through *next_indexp.
 * NOTE(review): elided listing — return statements not visible.
 */
1837 unformat_ip_next_node (unformat_input_t * input, va_list * args)
1839 vnet_classify_main_t * cm = &vnet_classify_main;
1840 u32 * next_indexp = va_arg (*args, u32 *);
1844 if (unformat (input, "node %U", unformat_vlib_node,
1845 cm->vlib_main, &node_index))
1847 rv = next_index = vlib_node_add_next
1848 (cm->vlib_main, ip4_classify_node.index, node_index);
1849 next_index = vlib_node_add_next
1850 (cm->vlib_main, ip6_classify_node.index, node_index);
1851 ASSERT(rv == next_index);
1853 *next_indexp = next_index;
/*
 * "node <name>" -> input-ACL next index.  Same shape as
 * unformat_ip_next_node above, but wires the node into ip4_inacl and
 * ip6_inacl; the ASSERT checks both arcs got the same next index.
 * NOTE(review): elided listing — return statements not visible.
 */
1860 unformat_acl_next_node (unformat_input_t * input, va_list * args)
1862 vnet_classify_main_t * cm = &vnet_classify_main;
1863 u32 * next_indexp = va_arg (*args, u32 *);
1867 if (unformat (input, "node %U", unformat_vlib_node,
1868 cm->vlib_main, &node_index))
1870 rv = next_index = vlib_node_add_next
1871 (cm->vlib_main, ip4_inacl_node.index, node_index);
1872 next_index = vlib_node_add_next
1873 (cm->vlib_main, ip6_inacl_node.index, node_index);
1874 ASSERT(rv == next_index);
1876 *next_indexp = next_index;
/*
 * "node <name>" -> l2 classify next index.  Single-arc variant of the
 * ip/acl helpers above: only l2_classify_node gains the new next.
 * NOTE(review): elided listing — return statements not visible.
 */
1883 unformat_l2_next_node (unformat_input_t * input, va_list * args)
1885 vnet_classify_main_t * cm = &vnet_classify_main;
1886 u32 * next_indexp = va_arg (*args, u32 *);
1890 if (unformat (input, "node %U", unformat_vlib_node,
1891 cm->vlib_main, &node_index))
1893 next_index = vlib_node_add_next
1894 (cm->vlib_main, l2_classify_node.index, node_index);
1896 *next_indexp = next_index;
/*
 * One-time init function: caches the vnet main pointer and registers
 * the default unformat helpers defined above (opaque-index, plus the
 * ip / l2 / acl "hit-next node <name>" parsers).
 * NOTE(review): elided listing — e.g. the vlib_main assignment and the
 * return statement are not visible.
 */
1903 static clib_error_t *
1904 vnet_classify_init (vlib_main_t * vm)
1906 vnet_classify_main_t * cm = &vnet_classify_main;
1909 cm->vnet_main = vnet_get_main();
1911 vnet_classify_register_unformat_opaque_index_fn
1912 (unformat_opaque_sw_if_index);
1914 vnet_classify_register_unformat_ip_next_index_fn
1915 (unformat_ip_next_node);
1917 vnet_classify_register_unformat_l2_next_index_fn
1918 (unformat_l2_next_node);
1920 vnet_classify_register_unformat_acl_next_index_fn
1921 (unformat_acl_next_node);
/* Register vnet_classify_init to run at vlib startup */
1926 VLIB_INIT_FUNCTION (vnet_classify_init);
/*
 * CLI handler for "test classify" (TEST_CODE builds).
 *
 * Builds a table that matches on the IPv4 source address only, adds
 * <sessions> sessions with consecutive source addresses starting at
 * "src" (default 1.0.0.10), or — with "del" and an existing "table" —
 * looks each session up and deletes it.  Opaque index i+100 is used as
 * a sanity check on lookup.
 *
 * NOTE(review): elided listing — many statements (is_add/del flag
 * handling, error paths, returns) are not visible here.
 */
1931 static clib_error_t *
1932 test_classify_command_fn (vlib_main_t * vm,
1933 unformat_input_t * input,
1934 vlib_cli_command_t * cmd)
1939 vnet_classify_table_t * t = 0;
1940 classify_data_or_mask_t * mask;
1941 classify_data_or_mask_t * data;
1942 u8 *mp = 0, *dp = 0;
1943 vnet_classify_main_t * cm = &vnet_classify_main;
1944 vnet_classify_entry_t * e;
1947 u32 table_index = ~0;
1950 u32 memory_size = 64<<20;
1952 /* Default starting address 1.0.0.10 */
1953 src.as_u32 = clib_net_to_host_u32 (0x0100000A);
1955 while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) {
1956 if (unformat (input, "sessions %d", &sessions))
1958 else if (unformat (input, "src %U", unformat_ip4_address, &src))
1960 else if (unformat (input, "buckets %d", &buckets))
1962 else if (unformat (input, "memory-size %uM", &tmp))
1963 memory_size = tmp<<20;
1964 else if (unformat (input, "memory-size %uG", &tmp))
1965 memory_size = tmp<<30;
1966 else if (unformat (input, "del"))
1968 else if (unformat (input, "table %d", &table_index))
/* 3 u32x4 quadwords of mask/data: enough for the src-address match */
1974 vec_validate_aligned (mp, 3 * sizeof(u32x4), sizeof(u32x4));
1975 vec_validate_aligned (dp, 3 * sizeof(u32x4), sizeof(u32x4));
1977 mask = (classify_data_or_mask_t *) mp;
1978 data = (classify_data_or_mask_t *) dp;
1980 data->ip.src_address.as_u32 = src.as_u32;
1982 /* Mask on src address */
1983 memset (&mask->ip.src_address, 0xff, 4);
/* Bucket count must be a power of two */
1985 buckets = 1<<max_log2(buckets);
1987 if (table_index != ~0)
1989 if (pool_is_free_index (cm->tables, table_index))
1991 vlib_cli_output (vm, "No such table %d", table_index);
1994 t = pool_elt_at_index (cm->tables, table_index);
2001 t = vnet_classify_new_table (cm, (u8 *)mask, buckets,
2004 3 /* vectors to match */);
2005 t->miss_next_index = IP_LOOKUP_NEXT_LOCAL;
2006 vlib_cli_output (vm, "Create table %d", t - cm->tables);
2009 vlib_cli_output (vm, "Add %d sessions to %d buckets...",
2012 for (i = 0; i < sessions; i++)
2014 rv = vnet_classify_add_del_session (cm, t - cm->tables, (u8 *) data,
2015 IP_LOOKUP_NEXT_DROP,
2016 i+100 /* opaque_index */,
2021 clib_warning ("add: returned %d", rv);
/* Increment the (network-order) source address for the next session */
2023 tmp = clib_net_to_host_u32 (data->ip.src_address.as_u32) + 1;
2024 data->ip.src_address.as_u32 = clib_net_to_host_u32 (tmp);
2031 vlib_cli_output (vm, "Must specify table index to delete sessions");
2035 vlib_cli_output (vm, "Try to delete %d sessions...", sessions);
2037 for (i = 0; i < sessions; i++)
2039 u8 * key_minus_skip;
2042 hash = vnet_classify_hash_packet (t, (u8 *) data);
2044 e = vnet_classify_find_entry (t, (u8 *) data, hash, 0 /* time_now */);
2045 /* Previous delete, perhaps... */
2048 ASSERT (e->opaque_index == (i+100));
/* add_del_session expects the key including skipped vectors; back up
   from the stored (post-skip) key */
2050 key_minus_skip = (u8 *)e->key;
2051 key_minus_skip -= t->skip_n_vectors * sizeof (u32x4);
2053 rv = vnet_classify_add_del_session (cm, t - cm->tables, key_minus_skip,
2054 IP_LOOKUP_NEXT_DROP,
2055 i+100 /* opaque_index */,
2059 clib_warning ("del: returned %d", rv);
2061 tmp = clib_net_to_host_u32 (data->ip.src_address.as_u32) + 1;
2062 data->ip.src_address.as_u32 = clib_net_to_host_u32 (tmp);
2066 vlib_cli_output (vm, "Deleted %d sessions...", deleted);
/*
 * CLI registration for the "test classify" command (TEST_CODE only).
 * NOTE(review): elided listing; the .short_help field assignment line
 * is not visible, only its string continuation.
 */
2075 VLIB_CLI_COMMAND (test_classify_command, static) = {
2076 .path = "test classify",
2078 "test classify [src <ip>] [sessions <nn>] [buckets <nn>] [table <nn>] [del]",
2079 .function = test_classify_command_fn,
2081 #endif /* TEST_CODE */