X-Git-Url: https://gerrit.fd.io/r/gitweb?a=blobdiff_plain;f=src%2Fplugins%2Fdpdk%2Fcryptodev%2Fcryptodev_op_data_path.c;h=d0f093e1414ae44d2158cd38438304c2cdaa3d48;hb=7cc17f6df9b3f4b45aaac16ba0aa098d6cd58794;hp=29af2fa1994d5ae5ab3e6bd9b71f24b3c55001e4;hpb=029f039d5d5022d536d2085993a9b7671d898767;p=vpp.git

diff --git a/src/plugins/dpdk/cryptodev/cryptodev_op_data_path.c b/src/plugins/dpdk/cryptodev/cryptodev_op_data_path.c
index 29af2fa1994..d0f093e1414 100644
--- a/src/plugins/dpdk/cryptodev/cryptodev_op_data_path.c
+++ b/src/plugins/dpdk/cryptodev/cryptodev_op_data_path.c
@@ -148,6 +148,9 @@ cryptodev_frame_linked_algs_enqueue (vlib_main_t *vm,
   cryptodev_cache_ring_elt_t *ring_elt =
     cryptodev_cache_ring_push (ring, frame);
 
+  if (PREDICT_FALSE (ring_elt == NULL))
+    return -1;
+
   ring_elt->aad_len = 1;
   ring_elt->op_type = (u8) op_type;
   return 0;
@@ -295,6 +298,10 @@ cryptodev_frame_aead_enqueue (vlib_main_t *vm,
   ERROR_ASSERT (frame->n_elts > 0);
   cryptodev_cache_ring_elt_t *ring_elt =
     cryptodev_cache_ring_push (ring, frame);
+
+  if (PREDICT_FALSE (ring_elt == NULL))
+    return -1;
+
   ring_elt->aad_len = aad_len;
   ring_elt->op_type = (u8) op_type;
   return 0;
@@ -462,7 +469,7 @@ cryptodev_frame_dequeue_internal (vlib_main_t *vm, u32 *nb_elts_processed,
   vnet_crypto_async_frame_t *frame = NULL;
   cryptodev_cache_ring_t *ring = &cet->cache_ring;
   u16 *const deq = &ring->deq_tail;
-  u16 n_deq, idx, left_to_deq, i;
+  u16 n_deq, left_to_deq;
   u16 max_to_deq = 0;
   u16 inflight = cet->inflight;
   u8 dequeue_more = 0;
@@ -472,29 +479,12 @@ cryptodev_frame_dequeue_internal (vlib_main_t *vm, u32 *nb_elts_processed,
   u32 n_elts, n;
   u64 err0 = 0, err1 = 0, err2 = 0, err3 = 0; /* partial errors mask */
 
-  idx = ring->deq_tail;
-
-  for (i = 0; i < VNET_CRYPTO_FRAME_POOL_SIZE; i++)
-    {
-      u32 frame_inflight =
-        CRYPTODEV_CACHE_RING_GET_FRAME_ELTS_INFLIGHT (ring, idx);
-
-      if (PREDICT_TRUE (frame_inflight > 0))
-        break;
-      idx++;
-      idx &= (VNET_CRYPTO_FRAME_POOL_SIZE - 1);
-    }
-
-  ERROR_ASSERT (i != VNET_CRYPTO_FRAME_POOL_SIZE);
-  ring->deq_tail = idx;
-
   left_to_deq =
     ring->frames[*deq].f->n_elts - ring->frames[*deq].deq_elts_tail;
   max_to_deq = clib_min (left_to_deq, CRYPTODE_DEQ_MAX);
 
   /* deq field can be used to track frame that is currently dequeued based on
      that you can specify the amount of elements to deq for the frame */
-
   n_deq =
     rte_cryptodev_dequeue_burst (cet->cryptodev_id, cet->cryptodev_q,
                                  (struct rte_crypto_op **) cops, max_to_deq);
@@ -547,9 +537,13 @@ cryptodev_frame_dequeue_internal (vlib_main_t *vm, u32 *nb_elts_processed,
       ring->frames[*deq].deq_elts_tail += n_deq;
       if (cryptodev_cache_ring_update_deq_tail (ring, deq))
        {
+         u32 fr_processed =
+           (CRYPTODEV_CACHE_QUEUE_SIZE - ring->tail + ring->deq_tail) &
+           CRYPTODEV_CACHE_QUEUE_MASK;
+
          *nb_elts_processed = frame->n_elts;
          *enqueue_thread_idx = frame->enqueue_thread_index;
-         dequeue_more = (max_to_deq < CRYPTODE_DEQ_MAX);
+         dequeue_more = (fr_processed < CRYPTODEV_MAX_PROCESED_IN_CACHE_QUEUE);
        }
 
       cet->inflight = inflight;
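
Editor's note, not part of the patch: the new fr_processed value in the last hunk counts the ring slots between ring->tail and ring->deq_tail on the cache ring, and dequeue_more stays set only while that count is below CRYPTODEV_MAX_PROCESED_IN_CACHE_QUEUE, replacing the old per-burst heuristic based on max_to_deq. Below is a minimal standalone sketch of the same wrap-around distance calculation; the queue size of 64 and the helper name ring_frames_processed are assumptions made purely for illustration, only the subtract-and-mask expression mirrors the one added in the hunk.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative sizes only: the real constants live in the cryptodev plugin
   headers. The mask trick requires the ring size to be a power of two. */
#define CACHE_QUEUE_SIZE 64u
#define CACHE_QUEUE_MASK (CACHE_QUEUE_SIZE - 1)

/* Distance from tail to deq_tail, wrapping around the ring; same shape as
   the fr_processed expression added in the last hunk. */
static inline uint32_t
ring_frames_processed (uint32_t tail, uint32_t deq_tail)
{
  return (CACHE_QUEUE_SIZE - tail + deq_tail) & CACHE_QUEUE_MASK;
}

int
main (void)
{
  /* No wrap-around: tail = 10, deq_tail = 14 -> prints 4. */
  printf ("%" PRIu32 "\n", ring_frames_processed (10, 14));
  /* Wrap-around: tail = 60, deq_tail = 2 -> prints 6. */
  printf ("%" PRIu32 "\n", ring_frames_processed (60, 2));
  return 0;
}

The subtract-and-mask form handles wrap-around without a branch, which is why the ring size must be a power of two; a caller keeps dequeuing only while the returned count stays under its cap, as the new dequeue_more condition does.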