+ crypto_context_t *crctx;
+ clib_bihash_kv_24_8_t kv;
+ vlib_thread_main_t *vtm = vlib_get_thread_main ();
+ int num_threads = 1 /* main thread */ + vtm->n_threads;
+ int i;
+
+ for (i = 0; i < num_threads; i++)
+ {
+ /* *INDENT-OFF* */
+ pool_foreach (crctx, qm->wrk_ctx[i].crypto_ctx_pool) {
+ if (crctx->ckpair_index == ckpair->cert_key_index)
+ {
+ quic_crypto_context_make_key_from_crctx (&kv, crctx);
+ clib_bihash_add_del_24_8 (&qm->wrk_ctx[i].crypto_context_hash, &kv, 0 /* is_add */ );
+ }
+ }
+ /* *INDENT-ON* */
+ }
+ return 0;
+}
+
+/* Allocate a zeroed crypto context from the per-thread pool.
+ * The returned context's ctx_index packs the owning thread in the top
+ * 8 bits and the pool index in the low 24 bits, so that
+ * quic_crypto_context_get () can recover both from a single u32.
+ * NOTE(review): idx is assumed to fit in 24 bits — no overflow check
+ * here; verify pool sizing bounds this elsewhere. */
+static crypto_context_t *
+quic_crypto_context_alloc (u8 thread_index)
+{
+ quic_main_t *qm = &quic_main;
+ crypto_context_t *crctx;
+ u32 idx;
+
+ pool_get (qm->wrk_ctx[thread_index].crypto_ctx_pool, crctx);
+ clib_memset (crctx, 0, sizeof (*crctx));
+ idx = (crctx - qm->wrk_ctx[thread_index].crypto_ctx_pool);
+ /* Pack thread index (8 bits) and pool index (24 bits) into one id. */
+ crctx->ctx_index = ((u32) thread_index) << 24 | idx;
+
+ return crctx;
+}
+
+/* Look up a crypto context by its packed index: top 8 bits encode the
+ * owning thread, low 24 bits the pool slot (the inverse of the packing
+ * done at allocation). Asserts that the caller-supplied thread_index
+ * matches the thread encoded in cr_index. */
+static crypto_context_t *
+quic_crypto_context_get (u32 cr_index, u32 thread_index)
+{
+ quic_main_t *qm = &quic_main;
+ ASSERT (cr_index >> 24 == thread_index);
+ return pool_elt_at_index (qm->wrk_ctx[thread_index].crypto_ctx_pool,
+ cr_index & 0x00ffffff);
+}
+
+/* CLI handler: list every QUIC crypto context on every thread (main
+ * thread plus all workers), printing one line per context via
+ * format_crypto_context. Always returns 0 (no CLI error). */
+static clib_error_t *
+quic_list_crypto_context_command_fn (vlib_main_t * vm,
+ unformat_input_t * input,
+ vlib_cli_command_t * cmd)
+{
+ quic_main_t *qm = &quic_main;
+ crypto_context_t *crctx;
+ vlib_thread_main_t *vtm = vlib_get_thread_main ();
+ int i, num_threads = 1 /* main thread */ + vtm->n_threads;
+ for (i = 0; i < num_threads; i++)
+ {
+ /* *INDENT-OFF* */
+ pool_foreach (crctx, qm->wrk_ctx[i].crypto_ctx_pool) {
+ vlib_cli_output (vm, "[%d][Q]%U", i, format_crypto_context, crctx);
+ }
+ /* *INDENT-ON* */
+ }
+ return 0;
+}
+
+static clib_error_t *
+quic_set_max_packets_per_key_fn (vlib_main_t * vm,
+ unformat_input_t * input,
+ vlib_cli_command_t * cmd)
+{
+ quic_main_t *qm = &quic_main;
+ unformat_input_t _line_input, *line_input = &_line_input;
+ u64 tmp;
+
+ if (!unformat_user (input, unformat_line_input, line_input))