+ /* Split an async crypto op id into its synchronous parts: returns 1 for
+  * AEAD ops (*auth_op_or_aad_len holds the AAD length), 0 for linked
+  * cipher + HMAC ops (*auth_op_or_aad_len holds the HMAC op id), and -1
+  * if the id is unknown. */
+ static_always_inline int
+ convert_async_crypto_id (vnet_crypto_async_op_id_t async_op_id,
+ u32 *crypto_op, u32 *auth_op_or_aad_len,
+ u16 *digest_len, u8 *is_enc)
+ {
+ switch (async_op_id)
+ {
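+ /* AEAD ops: the tag and AAD lengths are encoded in the async op id;
+  * expand an ENC/DEC case pair per algorithm and return 1. */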
+#define _(n, s, k, t, a) \
+ case VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC: \
+ *crypto_op = VNET_CRYPTO_OP_##n##_ENC; \
+ *auth_op_or_aad_len = a; \
+ *digest_len = t; \
+ *is_enc = 1; \
+ return 1; \
+ case VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC: \
+ *crypto_op = VNET_CRYPTO_OP_##n##_DEC; \
+ *auth_op_or_aad_len = a; \
+ *digest_len = t; \
+ *is_enc = 0; \
+ return 1;
+ foreach_crypto_aead_async_alg
+#undef _
+
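+ /* Linked cipher + HMAC ops: recover the standalone cipher and HMAC op
+  * ids and return 0. */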
+#define _(c, h, s, k, d) \
+ case VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC: \
+ *crypto_op = VNET_CRYPTO_OP_##c##_ENC; \
+ *auth_op_or_aad_len = VNET_CRYPTO_OP_##h##_HMAC; \
+ *digest_len = d; \
+ *is_enc = 1; \
+ return 0; \
+ case VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC: \
+ *crypto_op = VNET_CRYPTO_OP_##c##_DEC; \
+ *auth_op_or_aad_len = VNET_CRYPTO_OP_##h##_HMAC; \
+ *digest_len = d; \
+ *is_enc = 0; \
+ return 0;
+ foreach_crypto_link_async_alg
+#undef _
+
+ default:
+ return -1;
+ }
+
+ return -1;
+ }
+
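+ /* Dequeue handler: when self-crypto is enabled, claim and process one
+  * pending frame from the per-thread queues in round-robin order, then
+  * hand back the next completed frame from this thread's own queues. */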
+ static_always_inline vnet_crypto_async_frame_t *
+ crypto_sw_scheduler_dequeue (vlib_main_t *vm, u32 *nb_elts_processed,
+ u32 *enqueue_thread_idx)
+ {
+ crypto_sw_scheduler_main_t *cm = &crypto_sw_scheduler_main;
+ crypto_sw_scheduler_per_thread_data_t *ptd =
+ cm->per_thread_data + vm->thread_index;
+ vnet_crypto_async_frame_t *f = 0;
+ crypto_sw_scheduler_queue_t *current_queue = 0;
+ u32 tail, head;
+ u8 found = 0;
+
+ /* get a pending frame to process */
+ if (ptd->self_crypto_enabled)
+ {
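+ /* Round-robin over every thread's queues, starting just after the
+  * lcore served on the previous call. */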
+ u32 i = ptd->last_serve_lcore_id + 1;
+
+ while (1)
+ {
+ crypto_sw_scheduler_per_thread_data_t *st;
+ u32 j;
+
+ if (i >= vec_len (cm->per_thread_data))
+ i = 0;
+
+ st = cm->per_thread_data + i;
+
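+ /* Alternate between the encrypt and decrypt queues so neither
+  * direction is starved. */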
+ if (ptd->last_serve_encrypt)
+ current_queue = &st->queue[CRYPTO_SW_SCHED_QUEUE_TYPE_DECRYPT];
+ else
+ current_queue = &st->queue[CRYPTO_SW_SCHED_QUEUE_TYPE_ENCRYPT];
+
+ tail = current_queue->tail;
+ head = current_queue->head;
+
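+ /* Scan the ring from tail to head for a frame still pending. */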
+ for (j = tail; j != head; j++)
+ {
+ f = current_queue->jobs[j & CRYPTO_SW_SCHEDULER_QUEUE_MASK];
+
+ if (!f)
+ continue;
+
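+ /* Claim the frame with a CAS: only the winner moves it from
+  * PENDING to WORK_IN_PROGRESS and may process it. */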
+ if (clib_atomic_bool_cmp_and_swap (
+ &f->state, VNET_CRYPTO_FRAME_STATE_PENDING,
+ VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS))
+ {
+ found = 1;
+ break;
+ }
+ }
+
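+ /* Stop once a frame is claimed or after a full pass over all
+  * lcores; flip the encrypt/decrypt preference for the next call. */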
+ if (found || i == ptd->last_serve_lcore_id)
+ {
+ CLIB_MEMORY_STORE_BARRIER ();
+ ptd->last_serve_encrypt = !ptd->last_serve_encrypt;
+ break;
+ }
+
+ i++;
+ }
+
+ ptd->last_serve_lcore_id = i;
+ }
+
+ if (found)
+ {
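+ /* Decompose the async op id and dispatch to the AEAD or the
+  * linked cipher + HMAC path. */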
+ u32 crypto_op, auth_op_or_aad_len;
+ u16 digest_len;
+ u8 is_enc;
+ int ret;
+
+ ret = convert_async_crypto_id (
+ f->op, &crypto_op, &auth_op_or_aad_len, &digest_len, &is_enc);
+
+ if (ret == 1)
+ crypto_sw_scheduler_process_aead (vm, ptd, f, crypto_op,
+ auth_op_or_aad_len, digest_len);
+ else if (ret == 0)
+ crypto_sw_scheduler_process_link (vm, cm, ptd, f, crypto_op,
+ auth_op_or_aad_len, digest_len,
+ is_enc);
+
+ *enqueue_thread_idx = f->enqueue_thread_index;
+ *nb_elts_processed = f->n_elts;
+ }
+
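+ /* Alternate between this thread's encrypt and decrypt queues when
+  * returning completed frames to the dispatch node. */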
+ if (ptd->last_return_queue)
+ {
+ current_queue = &ptd->queue[CRYPTO_SW_SCHED_QUEUE_TYPE_DECRYPT];
+ ptd->last_return_queue = 0;
+ }
+ else
+ {
+ current_queue = &ptd->queue[CRYPTO_SW_SCHED_QUEUE_TYPE_ENCRYPT];
+ ptd->last_return_queue = 1;
+ }
+
+ tail = current_queue->tail & CRYPTO_SW_SCHEDULER_QUEUE_MASK;
+
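+ /* Frames are returned in order: only the frame at the tail is
+  * eligible, and only once it has reached a completed state (success
+  * or error). */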
+ if (current_queue->jobs[tail] &&
+ current_queue->jobs[tail]->state >= VNET_CRYPTO_FRAME_STATE_SUCCESS)
+ {
+ CLIB_MEMORY_STORE_BARRIER ();
+ current_queue->tail++;
+ f = current_queue->jobs[tail];
+ current_queue->jobs[tail] = 0;
+
+ return f;
+ }
+
+ return 0;
+ }