/*
 * Copyright (c) 2018 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
17 #include <vlib/vlib.h>
18 #include <vnet/crypto/crypto.h>
20 vnet_crypto_main_t crypto_main;
22 static_always_inline void
23 crypto_set_op_status (vnet_crypto_op_t * ops[], u32 n_ops, int status)
27 ops[0]->status = status;
32 static_always_inline u32
33 vnet_crypto_process_ops_call_handler (vlib_main_t * vm,
34 vnet_crypto_main_t * cm,
35 vnet_crypto_op_id_t opt,
36 vnet_crypto_op_t * ops[],
37 vnet_crypto_op_chunk_t * chunks,
47 if (cm->chained_ops_handlers[opt] == 0)
48 crypto_set_op_status (ops, n_ops,
49 VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER);
51 rv = (cm->chained_ops_handlers[opt]) (vm, ops, chunks, n_ops);
55 if (cm->ops_handlers[opt] == 0)
56 crypto_set_op_status (ops, n_ops,
57 VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER);
59 rv = (cm->ops_handlers[opt]) (vm, ops, n_ops);
64 static_always_inline u32
65 vnet_crypto_process_ops_inline (vlib_main_t * vm, vnet_crypto_op_t ops[],
66 vnet_crypto_op_chunk_t * chunks, u32 n_ops)
68 vnet_crypto_main_t *cm = &crypto_main;
69 const int op_q_size = VLIB_FRAME_SIZE;
70 vnet_crypto_op_t *op_queue[op_q_size];
71 vnet_crypto_op_id_t opt, current_op_type = ~0;
77 for (i = 0; i < n_ops; i++)
81 if (current_op_type != opt || n_op_queue >= op_q_size)
83 rv += vnet_crypto_process_ops_call_handler (vm, cm, current_op_type,
87 current_op_type = opt;
90 op_queue[n_op_queue++] = &ops[i];
93 rv += vnet_crypto_process_ops_call_handler (vm, cm, current_op_type,
94 op_queue, chunks, n_op_queue);
99 vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[], u32 n_ops)
101 return vnet_crypto_process_ops_inline (vm, ops, 0, n_ops);
105 vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
106 vnet_crypto_op_chunk_t * chunks, u32 n_ops)
108 return vnet_crypto_process_ops_inline (vm, ops, chunks, n_ops);
112 vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
115 vnet_crypto_main_t *cm = &crypto_main;
116 vnet_crypto_engine_t *p;
118 vec_add2 (cm->engines, p, 1);
123 hash_set_mem (cm->engine_index_by_name, p->name, p - cm->engines);
125 return p - cm->engines;
128 static_always_inline void
129 crypto_set_active_engine (vnet_crypto_op_data_t * od,
130 vnet_crypto_op_id_t id, u32 ei,
131 crypto_op_class_type_t oct)
133 vnet_crypto_main_t *cm = &crypto_main;
134 vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);
136 if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_CHAINED)
138 if (ce->chained_ops_handlers[id])
140 od->active_engine_index_chained = ei;
141 cm->chained_ops_handlers[id] = ce->chained_ops_handlers[id];
145 if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_SIMPLE)
147 if (ce->ops_handlers[id])
149 od->active_engine_index_simple = ei;
150 cm->ops_handlers[id] = ce->ops_handlers[id];
156 vnet_crypto_set_handler2 (char *alg_name, char *engine,
157 crypto_op_class_type_t oct)
160 vnet_crypto_main_t *cm = &crypto_main;
161 vnet_crypto_alg_data_t *ad;
164 p = hash_get_mem (cm->alg_index_by_name, alg_name);
168 ad = vec_elt_at_index (cm->algs, p[0]);
170 p = hash_get_mem (cm->engine_index_by_name, engine);
174 for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++)
176 vnet_crypto_op_data_t *od;
177 vnet_crypto_op_id_t id = ad->op_by_type[i];
181 od = cm->opt_data + id;
182 crypto_set_active_engine (od, id, p[0], oct);
189 vnet_crypto_is_set_handler (vnet_crypto_alg_t alg)
191 vnet_crypto_main_t *cm = &crypto_main;
192 vnet_crypto_op_id_t opt = 0;
195 if (alg > vec_len (cm->algs))
198 for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++)
199 if ((opt = cm->algs[alg].op_by_type[i]) != 0)
202 return NULL != cm->ops_handlers[opt];
206 vnet_crypto_register_ops_handler_inline (vlib_main_t * vm, u32 engine_index,
207 vnet_crypto_op_id_t opt,
208 vnet_crypto_ops_handler_t * fn,
209 vnet_crypto_chained_ops_handler_t *
212 vnet_crypto_main_t *cm = &crypto_main;
213 vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
214 vnet_crypto_op_data_t *otd = cm->opt_data + opt;
215 vec_validate_aligned (cm->ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
216 CLIB_CACHE_LINE_BYTES);
217 vec_validate_aligned (cm->chained_ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
218 CLIB_CACHE_LINE_BYTES);
222 e->ops_handlers[opt] = fn;
223 if (otd->active_engine_index_simple == ~0)
225 otd->active_engine_index_simple = engine_index;
226 cm->ops_handlers[opt] = fn;
229 ae = vec_elt_at_index (cm->engines, otd->active_engine_index_simple);
230 if (ae->priority < e->priority)
231 crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_SIMPLE);
236 e->chained_ops_handlers[opt] = cfn;
237 if (otd->active_engine_index_chained == ~0)
239 otd->active_engine_index_chained = engine_index;
240 cm->chained_ops_handlers[opt] = cfn;
243 ae = vec_elt_at_index (cm->engines, otd->active_engine_index_chained);
244 if (ae->priority < e->priority)
245 crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_CHAINED);
252 vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
253 vnet_crypto_op_id_t opt,
254 vnet_crypto_ops_handler_t * fn)
256 vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, 0);
260 vnet_crypto_register_chained_ops_handler (vlib_main_t * vm, u32 engine_index,
261 vnet_crypto_op_id_t opt,
262 vnet_crypto_chained_ops_handler_t *
265 vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, 0, fn);
269 vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
270 vnet_crypto_op_id_t opt,
271 vnet_crypto_ops_handler_t * fn,
272 vnet_crypto_chained_ops_handler_t * cfn)
274 vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, cfn);
278 vnet_crypto_register_enqueue_handler (vlib_main_t *vm, u32 engine_index,
279 vnet_crypto_async_op_id_t opt,
280 vnet_crypto_frame_enqueue_t *enqueue_hdl)
282 vnet_crypto_main_t *cm = &crypto_main;
283 vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
284 vnet_crypto_async_op_data_t *otd = cm->async_opt_data + opt;
285 vec_validate_aligned (cm->enqueue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS,
286 CLIB_CACHE_LINE_BYTES);
287 vec_validate_aligned (cm->dequeue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS,
288 CLIB_CACHE_LINE_BYTES);
293 e->enqueue_handlers[opt] = enqueue_hdl;
294 if (otd->active_engine_index_async == ~0)
296 otd->active_engine_index_async = engine_index;
297 cm->enqueue_handlers[opt] = enqueue_hdl;
300 ae = vec_elt_at_index (cm->engines, otd->active_engine_index_async);
301 if (ae->priority <= e->priority)
303 otd->active_engine_index_async = engine_index;
304 cm->enqueue_handlers[opt] = enqueue_hdl;
311 engine_index_cmp (void *v1, void *v2)
324 vnet_crypto_update_cm_dequeue_handlers (void)
326 vnet_crypto_main_t *cm = &crypto_main;
327 vnet_crypto_async_op_data_t *otd;
328 vnet_crypto_engine_t *e;
329 u32 *active_engines = 0, *ei, last_ei = ~0, i;
331 vec_reset_length (cm->dequeue_handlers);
333 for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_IDS; i++)
335 otd = cm->async_opt_data + i;
336 e = cm->engines + otd->active_engine_index_async;
337 if (!e->dequeue_handler)
339 vec_add1 (active_engines, otd->active_engine_index_async);
342 vec_sort_with_function (active_engines, engine_index_cmp);
344 vec_foreach (ei, active_engines)
346 if (ei[0] == last_ei)
349 e = cm->engines + ei[0];
350 vec_add1 (cm->dequeue_handlers, e->dequeue_handler);
354 vec_free (active_engines);
358 vnet_crypto_register_dequeue_handler (vlib_main_t *vm, u32 engine_index,
359 vnet_crypto_frame_dequeue_t *deq_fn)
361 vnet_crypto_main_t *cm = &crypto_main;
362 vnet_crypto_engine_t *e = vec_elt_at_index (cm->engines, engine_index);
367 e->dequeue_handler = deq_fn;
373 vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
374 vnet_crypto_key_handler_t * key_handler)
376 vnet_crypto_main_t *cm = &crypto_main;
377 vnet_crypto_engine_t *e = vec_elt_at_index (cm->engines, engine_index);
378 e->key_op_handler = key_handler;
383 vnet_crypto_key_len_check (vnet_crypto_alg_t alg, u16 length)
387 case VNET_CRYPTO_N_ALGS:
389 case VNET_CRYPTO_ALG_NONE:
393 case VNET_CRYPTO_ALG_##n: \
397 foreach_crypto_cipher_alg foreach_crypto_aead_alg
399 /* HMAC allows any key length */
401 case VNET_CRYPTO_ALG_HMAC_##n: \
403 foreach_crypto_hmac_alg
407 case VNET_CRYPTO_ALG_HASH_##n: \
409 foreach_crypto_hash_alg
417 vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg, u8 * data,
421 vnet_crypto_main_t *cm = &crypto_main;
422 vnet_crypto_engine_t *engine;
423 vnet_crypto_key_t *key;
425 u8 need_barrier_sync = 0;
427 if (!vnet_crypto_key_len_check (alg, length))
430 pool_get_aligned_will_expand (cm->keys, need_barrier_sync,
431 CLIB_CACHE_LINE_BYTES);
432 /* If the cm->keys will expand, stop the parade. */
433 if (need_barrier_sync)
434 vlib_worker_thread_barrier_sync (vm);
436 pool_get_zero (cm->keys, key);
438 if (need_barrier_sync)
439 vlib_worker_thread_barrier_release (vm);
441 index = key - cm->keys;
442 key->type = VNET_CRYPTO_KEY_TYPE_DATA;
444 vec_validate_aligned (key->data, length - 1, CLIB_CACHE_LINE_BYTES);
445 clib_memcpy (key->data, data, length);
447 vec_foreach (engine, cm->engines)
448 if (engine->key_op_handler)
449 engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
455 vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index)
457 vnet_crypto_main_t *cm = &crypto_main;
458 vnet_crypto_engine_t *engine;
459 vnet_crypto_key_t *key = pool_elt_at_index (cm->keys, index);
462 vec_foreach (engine, cm->engines)
463 if (engine->key_op_handler)
464 engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_DEL, index);
467 if (key->type == VNET_CRYPTO_KEY_TYPE_DATA)
469 clib_memset (key->data, 0, vec_len (key->data));
470 vec_free (key->data);
472 else if (key->type == VNET_CRYPTO_KEY_TYPE_LINK)
474 key->index_crypto = key->index_integ = 0;
477 pool_put (cm->keys, key);
480 vnet_crypto_async_alg_t
481 vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
482 vnet_crypto_alg_t integ_alg)
484 #define _(c, h, s, k ,d) \
485 if (crypto_alg == VNET_CRYPTO_ALG_##c && \
486 integ_alg == VNET_CRYPTO_ALG_HMAC_##h) \
487 return VNET_CRYPTO_ALG_##c##_##h##_TAG##d;
488 foreach_crypto_link_async_alg
494 vnet_crypto_key_add_linked (vlib_main_t * vm,
495 vnet_crypto_key_index_t index_crypto,
496 vnet_crypto_key_index_t index_integ)
499 vnet_crypto_main_t *cm = &crypto_main;
500 vnet_crypto_engine_t *engine;
501 vnet_crypto_key_t *key_crypto, *key_integ, *key;
502 vnet_crypto_async_alg_t linked_alg;
504 key_crypto = pool_elt_at_index (cm->keys, index_crypto);
505 key_integ = pool_elt_at_index (cm->keys, index_integ);
507 linked_alg = vnet_crypto_link_algs (key_crypto->alg, key_integ->alg);
508 if (linked_alg == ~0)
511 pool_get_zero (cm->keys, key);
512 index = key - cm->keys;
513 key->type = VNET_CRYPTO_KEY_TYPE_LINK;
514 key->index_crypto = index_crypto;
515 key->index_integ = index_integ;
516 key->async_alg = linked_alg;
519 vec_foreach (engine, cm->engines)
520 if (engine->key_op_handler)
521 engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
528 crypto_dispatch_enable_disable (int is_enable)
530 vnet_crypto_main_t *cm = &crypto_main;
531 vlib_thread_main_t *tm = vlib_get_thread_main ();
532 u32 skip_master = vlib_num_workers () > 0, i;
533 vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;
536 CLIB_MEMORY_STORE_BARRIER ();
537 if (is_enable && cm->async_refcnt > 0)
542 VNET_CRYPTO_ASYNC_DISPATCH_POLLING ? VLIB_NODE_STATE_POLLING :
543 VLIB_NODE_STATE_INTERRUPT;
546 if (!is_enable && cm->async_refcnt == 0)
549 state = VLIB_NODE_STATE_DISABLED;
553 for (i = skip_master; i < tm->n_vlib_mains; i++)
555 vlib_main_t *ovm = vlib_get_main_by_index (i);
556 if (state != vlib_node_get_state (ovm, cm->crypto_node_index))
557 vlib_node_set_state (ovm, cm->crypto_node_index, state);
562 static_always_inline void
563 crypto_set_active_async_engine (vnet_crypto_async_op_data_t * od,
564 vnet_crypto_async_op_id_t id, u32 ei)
566 vnet_crypto_main_t *cm = &crypto_main;
567 vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);
569 if (ce->enqueue_handlers[id] && ce->dequeue_handler)
571 od->active_engine_index_async = ei;
572 cm->enqueue_handlers[id] = ce->enqueue_handlers[id];
573 cm->dequeue_handlers[id] = ce->dequeue_handler;
578 vnet_crypto_set_async_handler2 (char *alg_name, char *engine)
581 vnet_crypto_main_t *cm = &crypto_main;
582 vnet_crypto_async_alg_data_t *ad;
585 if (cm->async_refcnt)
588 p = hash_get_mem (cm->async_alg_index_by_name, alg_name);
592 ad = vec_elt_at_index (cm->async_algs, p[0]);
594 p = hash_get_mem (cm->engine_index_by_name, engine);
598 for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_TYPES; i++)
600 vnet_crypto_async_op_data_t *od;
601 vnet_crypto_async_op_id_t id = ad->op_by_type[i];
605 od = cm->async_opt_data + id;
606 crypto_set_active_async_engine (od, id, p[0]);
609 vnet_crypto_update_cm_dequeue_handlers ();
615 vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name)
617 vnet_crypto_main_t *cm = &crypto_main;
618 vnet_crypto_async_next_node_t *nn = 0;
619 vlib_node_t *cc, *pn;
620 uword index = vec_len (cm->next_nodes);
622 pn = vlib_get_node_by_name (vm, (u8 *) post_node_name);
627 vec_foreach (cm->next_nodes, nn)
629 if (nn->node_idx == pn->index)
634 vec_validate (cm->next_nodes, index);
635 nn = vec_elt_at_index (cm->next_nodes, index);
637 cc = vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch");
638 nn->next_idx = vlib_node_add_named_next (vm, cc->index, post_node_name);
639 nn->node_idx = pn->index;
645 vnet_crypto_request_async_mode (int is_enable)
647 vnet_crypto_main_t *cm = &crypto_main;
648 vlib_thread_main_t *tm = vlib_get_thread_main ();
649 u32 skip_master = vlib_num_workers () > 0, i;
650 vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;
653 CLIB_MEMORY_STORE_BARRIER ();
654 if (is_enable && cm->async_refcnt == 0)
658 cm->dispatch_mode == VNET_CRYPTO_ASYNC_DISPATCH_POLLING ?
659 VLIB_NODE_STATE_POLLING : VLIB_NODE_STATE_INTERRUPT;
661 if (!is_enable && cm->async_refcnt == 1)
664 state = VLIB_NODE_STATE_DISABLED;
670 for (i = skip_master; i < tm->n_vlib_mains; i++)
672 vlib_main_t *ovm = vlib_get_main_by_index (i);
673 if (state != vlib_node_get_state (ovm, cm->crypto_node_index))
674 vlib_node_set_state (ovm, cm->crypto_node_index, state);
678 vnet_crypto_update_cm_dequeue_handlers ();
682 cm->async_refcnt += 1;
683 else if (cm->async_refcnt > 0)
684 cm->async_refcnt -= 1;
688 vnet_crypto_set_async_dispatch_mode (u8 mode)
690 vnet_crypto_main_t *cm = &crypto_main;
691 u32 skip_master = vlib_num_workers () > 0, i;
692 vlib_thread_main_t *tm = vlib_get_thread_main ();
693 vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;
695 CLIB_MEMORY_STORE_BARRIER ();
696 cm->dispatch_mode = mode;
697 if (mode == VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT)
700 cm->async_refcnt == 0 ?
701 VLIB_NODE_STATE_DISABLED : VLIB_NODE_STATE_INTERRUPT;
703 else if (mode == VNET_CRYPTO_ASYNC_DISPATCH_POLLING)
706 cm->async_refcnt == 0 ?
707 VLIB_NODE_STATE_DISABLED : VLIB_NODE_STATE_POLLING;
710 for (i = skip_master; i < tm->n_vlib_mains; i++)
712 vlib_main_t *ovm = vlib_get_main_by_index (i);
713 if (state != vlib_node_get_state (ovm, cm->crypto_node_index))
714 vlib_node_set_state (ovm, cm->crypto_node_index, state);
719 vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t op)
721 vnet_crypto_main_t *cm = &crypto_main;
723 return (op < vec_len (cm->enqueue_handlers) &&
724 NULL != cm->enqueue_handlers[op]);
728 vnet_crypto_init_cipher_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t eid,
729 vnet_crypto_op_id_t did, char *name, u8 is_aead)
731 vnet_crypto_op_type_t eopt, dopt;
732 vnet_crypto_main_t *cm = &crypto_main;
734 cm->algs[alg].name = name;
735 cm->opt_data[eid].alg = cm->opt_data[did].alg = alg;
736 cm->opt_data[eid].active_engine_index_simple = ~0;
737 cm->opt_data[did].active_engine_index_simple = ~0;
738 cm->opt_data[eid].active_engine_index_chained = ~0;
739 cm->opt_data[did].active_engine_index_chained = ~0;
742 eopt = VNET_CRYPTO_OP_TYPE_AEAD_ENCRYPT;
743 dopt = VNET_CRYPTO_OP_TYPE_AEAD_DECRYPT;
747 eopt = VNET_CRYPTO_OP_TYPE_ENCRYPT;
748 dopt = VNET_CRYPTO_OP_TYPE_DECRYPT;
750 cm->opt_data[eid].type = eopt;
751 cm->opt_data[did].type = dopt;
752 cm->algs[alg].op_by_type[eopt] = eid;
753 cm->algs[alg].op_by_type[dopt] = did;
754 hash_set_mem (cm->alg_index_by_name, name, alg);
758 vnet_crypto_init_hash_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t id,
761 vnet_crypto_main_t *cm = &crypto_main;
762 cm->algs[alg].name = name;
763 cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_HASH] = id;
764 cm->opt_data[id].alg = alg;
765 cm->opt_data[id].active_engine_index_simple = ~0;
766 cm->opt_data[id].active_engine_index_chained = ~0;
767 cm->opt_data[id].type = VNET_CRYPTO_OP_TYPE_HASH;
768 hash_set_mem (cm->alg_index_by_name, name, alg);
772 vnet_crypto_init_hmac_data (vnet_crypto_alg_t alg,
773 vnet_crypto_op_id_t id, char *name)
775 vnet_crypto_main_t *cm = &crypto_main;
776 cm->algs[alg].name = name;
777 cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_HMAC] = id;
778 cm->opt_data[id].alg = alg;
779 cm->opt_data[id].active_engine_index_simple = ~0;
780 cm->opt_data[id].active_engine_index_chained = ~0;
781 cm->opt_data[id].type = VNET_CRYPTO_OP_TYPE_HMAC;
782 hash_set_mem (cm->alg_index_by_name, name, alg);
786 vnet_crypto_init_async_data (vnet_crypto_async_alg_t alg,
787 vnet_crypto_async_op_id_t eid,
788 vnet_crypto_async_op_id_t did, char *name)
790 vnet_crypto_main_t *cm = &crypto_main;
792 cm->async_algs[alg].name = name;
793 cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT] = eid;
794 cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT] = did;
795 cm->async_opt_data[eid].type = VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT;
796 cm->async_opt_data[eid].alg = alg;
797 cm->async_opt_data[eid].active_engine_index_async = ~0;
798 cm->async_opt_data[eid].active_engine_index_async = ~0;
799 cm->async_opt_data[did].type = VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT;
800 cm->async_opt_data[did].alg = alg;
801 cm->async_opt_data[did].active_engine_index_async = ~0;
802 cm->async_opt_data[did].active_engine_index_async = ~0;
803 hash_set_mem (cm->async_alg_index_by_name, name, alg);
807 vnet_crypto_init (vlib_main_t * vm)
809 vnet_crypto_main_t *cm = &crypto_main;
810 vlib_thread_main_t *tm = vlib_get_thread_main ();
811 vnet_crypto_thread_t *ct = 0;
813 cm->dispatch_mode = VNET_CRYPTO_ASYNC_DISPATCH_POLLING;
814 cm->engine_index_by_name = hash_create_string ( /* size */ 0,
816 cm->alg_index_by_name = hash_create_string (0, sizeof (uword));
817 cm->async_alg_index_by_name = hash_create_string (0, sizeof (uword));
818 vec_validate_aligned (cm->threads, tm->n_vlib_mains, CLIB_CACHE_LINE_BYTES);
819 vec_foreach (ct, cm->threads)
820 pool_alloc_aligned (ct->frame_pool, VNET_CRYPTO_FRAME_POOL_SIZE,
821 CLIB_CACHE_LINE_BYTES);
822 vec_validate (cm->algs, VNET_CRYPTO_N_ALGS);
823 vec_validate (cm->async_algs, VNET_CRYPTO_N_ASYNC_ALGS);
826 vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
827 VNET_CRYPTO_OP_##n##_ENC, \
828 VNET_CRYPTO_OP_##n##_DEC, s, 0);
829 foreach_crypto_cipher_alg;
832 vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
833 VNET_CRYPTO_OP_##n##_ENC, \
834 VNET_CRYPTO_OP_##n##_DEC, s, 1);
835 foreach_crypto_aead_alg;
838 vnet_crypto_init_hmac_data (VNET_CRYPTO_ALG_HMAC_##n, \
839 VNET_CRYPTO_OP_##n##_HMAC, "hmac-" s);
840 foreach_crypto_hmac_alg;
843 vnet_crypto_init_hash_data (VNET_CRYPTO_ALG_HASH_##n, \
844 VNET_CRYPTO_OP_##n##_HASH, s);
845 foreach_crypto_hash_alg;
847 #define _(n, s, k, t, a) \
848 vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a, \
849 VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
850 VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC, \
852 foreach_crypto_aead_async_alg
854 #define _(c, h, s, k ,d) \
855 vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##c##_##h##_TAG##d, \
856 VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
857 VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC, \
859 foreach_crypto_link_async_alg
861 cm->crypto_node_index =
862 vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch")->index;
867 VLIB_INIT_FUNCTION (vnet_crypto_init);
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */