+
+ return 0;
+}
+
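+/* Allocate a key slot, copy the key material and notify every engine
+ * that registered a key handler. Returns the new key index, or ~0 if
+ * the key length is not valid for the algorithm. */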
+u32
+vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg, u8 * data,
+ u16 length)
+{
+ u32 index;
+ vnet_crypto_main_t *cm = &crypto_main;
+ vnet_crypto_engine_t *engine;
+ vnet_crypto_key_t *key;
+
+ u8 need_barrier_sync = 0;
+
+ if (!vnet_crypto_key_len_check (alg, length))
+ return ~0;
+
+ /* If adding this key will make cm->keys expand (and thus reallocate),
+ * worker threads must be held at the barrier so none of them reads
+ * the pool while it moves. */
+ need_barrier_sync = pool_get_will_expand (cm->keys);
+ if (need_barrier_sync)
+ vlib_worker_thread_barrier_sync (vm);
+
+ pool_get_zero (cm->keys, key);
+
+ if (need_barrier_sync)
+ vlib_worker_thread_barrier_release (vm);
+
+ index = key - cm->keys;
+ key->type = VNET_CRYPTO_KEY_TYPE_DATA;
+ key->alg = alg;
+ vec_validate_aligned (key->data, length - 1, CLIB_CACHE_LINE_BYTES);
+ clib_memcpy (key->data, data, length);
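+ /* notify engines that registered a key handler */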
+ /* *INDENT-OFF* */
+ vec_foreach (engine, cm->engines)
+ if (engine->key_op_handler)
+ engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
+ /* *INDENT-ON* */
+ return index;
+}
+
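+/* Delete a key: notify engines, scrub and free the key material of a
+ * data key (or unlink a linked key) and return the slot to the pool. */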
+void
+vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index)
+{
+ vnet_crypto_main_t *cm = &crypto_main;
+ vnet_crypto_engine_t *engine;
+ vnet_crypto_key_t *key = pool_elt_at_index (cm->keys, index);
+
+ /* *INDENT-OFF* */
+ vec_foreach (engine, cm->engines)
+ if (engine->key_op_handler)
+ engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_DEL, index);
+ /* *INDENT-ON* */
+
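+ /* overwrite the secret with a fill pattern before freeing it */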
+ if (key->type == VNET_CRYPTO_KEY_TYPE_DATA)
+ {
+ clib_memset (key->data, 0xfe, vec_len (key->data));
+ vec_free (key->data);
+ }
+ else if (key->type == VNET_CRYPTO_KEY_TYPE_LINK)
+ {
+ key->index_crypto = key->index_integ = ~0;
+ }
+
+ pool_put (cm->keys, key);
+}
+
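+/* Map a (crypto, integrity) algorithm pair to the combined async
+ * algorithm, or ~0 if no such linked algorithm exists. */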
+vnet_crypto_async_alg_t
+vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
+ vnet_crypto_alg_t integ_alg)
+{
+#define _(c, h, s, k, d) \
+ if (crypto_alg == VNET_CRYPTO_ALG_##c && \
+ integ_alg == VNET_CRYPTO_ALG_HMAC_##h) \
+ return VNET_CRYPTO_ALG_##c##_##h##_TAG##d;
+ foreach_crypto_link_async_alg
+#undef _
+ return ~0;
+}
+
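+/* Create a linked key that pairs an existing crypto key with an existing
+ * integrity key. Only the two key indices and the resolved async
+ * algorithm are stored; the key material itself is not copied. */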
+u32
+vnet_crypto_key_add_linked (vlib_main_t * vm,
+ vnet_crypto_key_index_t index_crypto,
+ vnet_crypto_key_index_t index_integ)
+{
+ u32 index;
+ vnet_crypto_main_t *cm = &crypto_main;
+ vnet_crypto_engine_t *engine;
+ vnet_crypto_key_t *key_crypto, *key_integ, *key;
+ vnet_crypto_async_alg_t linked_alg;
+
+ key_crypto = pool_elt_at_index (cm->keys, index_crypto);
+ key_integ = pool_elt_at_index (cm->keys, index_integ);
+
+ linked_alg = vnet_crypto_link_algs (key_crypto->alg, key_integ->alg);
+ if (linked_alg == ~0)
+ return ~0;
+
+ pool_get_zero (cm->keys, key);
+ index = key - cm->keys;
+ key->type = VNET_CRYPTO_KEY_TYPE_LINK;
+ key->index_crypto = index_crypto;
+ key->index_integ = index_integ;
+ key->async_alg = linked_alg;
+
+ /* *INDENT-OFF* */
+ vec_foreach (engine, cm->engines)
+ if (engine->key_op_handler)
+ engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
+ /* *INDENT-ON* */
+
+ return index;
+}
+
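+/* Make engine 'ei' the active async engine for op 'id', provided it
+ * registered both an enqueue handler for this op and a dequeue handler. */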
+static_always_inline void
+crypto_set_active_async_engine (vnet_crypto_async_op_data_t * od,
+ vnet_crypto_async_op_id_t id, u32 ei)
+{
+ vnet_crypto_main_t *cm = &crypto_main;
+ vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);
+
+ if (ce->enqueue_handlers[id] && ce->dequeue_handler)
+ {
+ od->active_engine_index_async = ei;
+ cm->enqueue_handlers[id] = ce->enqueue_handlers[id];
+ }
+}
+
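+/* Select the named engine as the async handler for the named algorithm.
+ * Returns -1 if either the algorithm or the engine is unknown. */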
+int
+vnet_crypto_set_async_handler2 (char *alg_name, char *engine)
+{
+ uword *p;
+ vnet_crypto_main_t *cm = &crypto_main;
+ vnet_crypto_async_alg_data_t *ad;
+ int i;
+
+ p = hash_get_mem (cm->async_alg_index_by_name, alg_name);
+ if (!p)
+ return -1;
+
+ ad = vec_elt_at_index (cm->async_algs, p[0]);
+
+ p = hash_get_mem (cm->engine_index_by_name, engine);
+ if (!p)
+ return -1;
+
+ for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_TYPES; i++)