+/**
+ * Map a (cipher, HMAC) algorithm pair to its combined async algorithm.
+ *
+ * Expands foreach_crypto_link_async_alg into a chain of comparisons:
+ * when both the crypto and integrity algorithms match a supported
+ * combination, the corresponding VNET_CRYPTO_ALG_<c>_<h>_TAG<d> id is
+ * returned.
+ *
+ * @param crypto_alg  cipher algorithm id
+ * @param integ_alg   integrity (HMAC) algorithm id
+ * @return linked async algorithm id, or ~0 if the pair is unsupported
+ */
+vnet_crypto_async_alg_t
+vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
+ vnet_crypto_alg_t integ_alg)
+{
+#define _(c, h, s, k ,d) \
+ if (crypto_alg == VNET_CRYPTO_ALG_##c && \
+ integ_alg == VNET_CRYPTO_ALG_HMAC_##h) \
+ return VNET_CRYPTO_ALG_##c##_##h##_TAG##d;
+ foreach_crypto_link_async_alg
+#undef _
+ return ~0;
+}
+
+/**
+ * Create a linked key that pairs an existing crypto key with an
+ * existing integrity key for combined async operation.
+ *
+ * @param vm            vlib main
+ * @param index_crypto  pool index of the cipher key
+ * @param index_integ   pool index of the integrity (HMAC) key
+ * @return pool index of the new linked key, or ~0 when the two key
+ *         algorithms have no supported linked async combination
+ */
+u32
+vnet_crypto_key_add_linked (vlib_main_t * vm,
+ vnet_crypto_key_index_t index_crypto,
+ vnet_crypto_key_index_t index_integ)
+{
+ u32 index;
+ vnet_crypto_main_t *cm = &crypto_main;
+ vnet_crypto_engine_t *engine;
+ vnet_crypto_key_t *key_crypto, *key_integ, *key;
+ vnet_crypto_async_alg_t linked_alg;
+
+ key_crypto = pool_elt_at_index (cm->keys, index_crypto);
+ key_integ = pool_elt_at_index (cm->keys, index_integ);
+
+ /* resolve the combined algorithm before allocating anything */
+ linked_alg = vnet_crypto_link_algs (key_crypto->alg, key_integ->alg);
+ if (linked_alg == ~0)
+ return ~0;
+
+ /* NOTE(review): pool_get_zero may reallocate cm->keys, invalidating
+ key_crypto/key_integ; they are not dereferenced past this point. */
+ pool_get_zero (cm->keys, key);
+ index = key - cm->keys;
+ key->type = VNET_CRYPTO_KEY_TYPE_LINK;
+ key->index_crypto = index_crypto;
+ key->index_integ = index_integ;
+ key->async_alg = linked_alg;
+
+ /* let every engine with a key handler learn about the new key */
+ /* *INDENT-OFF* */
+ vec_foreach (engine, cm->engines)
+ if (engine->key_op_handler)
+ engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
+ /* *INDENT-ON* */
+
+ return index;
+}
+
+/**
+ * Make engine @a ei the active async engine for operation @a id.
+ *
+ * The switch only happens when the engine provides both the enqueue
+ * handler for this op and a dequeue handler; otherwise the current
+ * selection is left untouched.
+ */
+static_always_inline void
+crypto_set_active_async_engine (vnet_crypto_async_op_data_t * od,
+ vnet_crypto_async_op_id_t id, u32 ei)
+{
+ vnet_crypto_main_t *cm = &crypto_main;
+ vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);
+
+ if (ce->enqueue_handlers[id] && ce->dequeue_handler)
+ {
+ od->active_engine_index_async = ei;
+ /* publish the engine's enqueue handler into the global table */
+ cm->enqueue_handlers[id] = ce->enqueue_handlers[id];
+ }
+}
+
+/**
+ * Select @a engine as the async handler for every operation type of
+ * the named async algorithm, then refresh the cached dequeue handlers.
+ *
+ * @param alg_name  async algorithm name (looked up in
+ *                  async_alg_index_by_name)
+ * @param engine    engine name (looked up in engine_index_by_name)
+ * @return 0 on success, -1 when the algorithm or engine is unknown
+ */
+int
+vnet_crypto_set_async_handler2 (char *alg_name, char *engine)
+{
+ uword *p;
+ vnet_crypto_main_t *cm = &crypto_main;
+ vnet_crypto_async_alg_data_t *ad;
+ int i;
+
+ p = hash_get_mem (cm->async_alg_index_by_name, alg_name);
+ if (!p)
+ return -1;
+
+ ad = vec_elt_at_index (cm->async_algs, p[0]);
+
+ p = hash_get_mem (cm->engine_index_by_name, engine);
+ if (!p)
+ return -1;
+
+ for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_TYPES; i++)
+ {
+ vnet_crypto_async_op_data_t *od;
+ vnet_crypto_async_op_id_t id = ad->op_by_type[i];
+ /* id 0 marks "no op of this type" for the algorithm — skip it */
+ if (id == 0)
+ continue;
+
+ od = cm->async_opt_data + id;
+ crypto_set_active_async_engine (od, id, p[0]);
+ }
+
+ /* re-sync dequeue handlers with the newly selected engines */
+ vnet_crypto_update_cm_dequeue_handlers ();
+
+ return 0;
+}
+
+/**
+ * Register @a post_node_name as a next node of "crypto-dispatch" so
+ * async crypto frames can be handed off to it after completion.
+ *
+ * Idempotent: if the node was registered before, the cached next index
+ * is returned.
+ *
+ * @param vm              vlib main
+ * @param post_node_name  name of the post-processing graph node
+ * @return next index on the crypto-dispatch node, or ~0 when either
+ *         the post node or the crypto-dispatch node does not exist
+ */
+u32
+vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name)
+{
+ vnet_crypto_main_t *cm = &crypto_main;
+ vnet_crypto_async_next_node_t *nn = 0;
+ vlib_node_t *cc, *pn;
+ uword index = vec_len (cm->next_nodes);
+
+ pn = vlib_get_node_by_name (vm, (u8 *) post_node_name);
+ if (!pn)
+ return ~0;
+
+ /* look up the dispatch node before mutating cm->next_nodes so a
+ failed lookup cannot leave a stale zeroed entry behind; also
+ avoids the previous unchecked NULL dereference of cc */
+ cc = vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch");
+ if (!cc)
+ return ~0;
+
+ /* already registered? return the cached next index */
+ /* *INDENT-OFF* */
+ vec_foreach (nn, cm->next_nodes)
+ {
+ if (nn->node_idx == pn->index)
+ return nn->next_idx;
+ }
+ /* *INDENT-ON* */
+
+ vec_validate (cm->next_nodes, index);
+ nn = vec_elt_at_index (cm->next_nodes, index);
+
+ nn->next_idx = vlib_node_add_named_next (vm, cc->index, post_node_name);
+ nn->node_idx = pn->index;
+
+ return nn->next_idx;
+}
+
+/**
+ * Report whether an async enqueue handler has been registered for
+ * operation @a op.
+ *
+ * @param op  async operation id
+ * @return non-zero when a handler is set, zero otherwise
+ */
+int
+vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t op)
+{
+ vnet_crypto_main_t *cm = &crypto_main;
+
+ /* out-of-range op ids trivially have no handler */
+ if (op >= vec_len (cm->enqueue_handlers))
+ return 0;
+
+ return cm->enqueue_handlers[op] != NULL;
+}
+