while (n_left_from > 0)
{
u32 n_left_to_next;
- u32 buffers_passed = 0;
vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
goto trace;
}
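/* presumably: an integrity failure is only meaningful when the crypto
   operation completed synchronously; when it was posted for async
   completion the result is not valid yet */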
- if (PREDICT_FALSE (!result.ok))
+ if (PREDICT_FALSE (!posted && !result.ok))
{
vlib_node_increment_counter (vm, odp_crypto_esp_decrypt_node.index,
ESP_DECRYPT_ERROR_INTEG_ERROR, 1);
vnet_buffer (b0)->sw_if_index[VLIB_TX] = (u32) ~ 0;
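/* the cast presumably matches the u8 width of post_crypto.next_index and
   silences an implicit narrowing warning */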
- vnet_buffer (b0)->post_crypto.next_index = next0;
+ vnet_buffer (b0)->post_crypto.next_index = (u8) next0;
}
trace:
{
vlib_validate_buffer_enqueue_x1 (vm, node, next_index, to_next,
n_left_to_next, bi0, next0);
- buffers_passed += 1;
+ }
+ else
+ {
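/* assumption: the operation was posted for async completion, so the buffer
   is not enqueued here; hand the speculatively claimed slot back */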
+ to_next -= 1;
+ n_left_to_next += 1;
}
}
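/* a vlib_get_next_frame should always be paired with vlib_put_next_frame,
   even when nothing was enqueued on this pass, so the buffers_passed guard
   is dropped */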
- if (buffers_passed > 0)
- vlib_put_next_frame (vm, node, next_index, n_left_to_next);
+ vlib_put_next_frame (vm, node, next_index, n_left_to_next);
}
vlib_node_increment_counter (vm, odp_crypto_esp_decrypt_node.index,
ESP_DECRYPT_ERROR_RX_PKTS,
from_frame->n_vectors);
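/* For orientation only: a minimal sketch of the standard VLIB
   speculative-enqueue pattern these hunks edit.  Variable names and the
   posted handling are assumptions reconstructed from the hunks above, not
   the literal plugin code. */
while (n_left_from > 0)
  {
    u32 n_left_to_next;

    vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

    while (n_left_from > 0 && n_left_to_next > 0)
      {
        u32 bi0 = from[0];
        u32 next0 = next_index;
        odp_bool_t posted = 0;

        from += 1;
        n_left_from -= 1;

        /* speculatively claim a slot in the next frame */
        to_next[0] = bi0;
        to_next += 1;
        n_left_to_next -= 1;

        /* ... build crypto_op_params for this buffer and submit it ... */

        if (!posted)
          {
            /* synchronous completion: keep the buffer in the frame and let
               the macro fix things up if next0 differs from next_index */
            vlib_validate_buffer_enqueue_x1 (vm, node, next_index, to_next,
                                             n_left_to_next, bi0, next0);
          }
        else
          {
            /* async completion pending: give the slot back */
            to_next -= 1;
            n_left_to_next += 1;
          }
      }

    vlib_put_next_frame (vm, node, next_index, n_left_to_next);
  }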
while (n_left_from > 0)
{
u32 n_left_to_next;
- u32 buffers_passed = 0;
vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
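/* grow the packet to cover the ICV the integrity algorithm appends;
   trunc_size is presumably the truncated digest length for sa0->integ_alg */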
b0->current_length +=
em->esp_integ_algs[sa0->integ_alg].trunc_size;
- vnet_buffer (b0)->post_crypto.next_index = next0;
+ vnet_buffer (b0)->post_crypto.next_index = (u8) next0;
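/* odp_crypto_operation() sets posted when the request was queued for
   asynchronous completion; result is only meaningful for a synchronous
   return (posted == 0) */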
int ret =
odp_crypto_operation (&crypto_op_params, &posted, &result);
vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
to_next, n_left_to_next, bi0,
next0);
- buffers_passed += 1;
}
-
+ else
+ {
+ to_next -= 1;
+ n_left_to_next += 1;
+ }
}
- if (buffers_passed > 0)
- vlib_put_next_frame (vm, node, next_index, n_left_to_next);
+ vlib_put_next_frame (vm, node, next_index, n_left_to_next);
}
vlib_node_increment_counter (vm, odp_crypto_esp_encrypt_node.index,
ESP_ENCRYPT_ERROR_RX_PKTS,
from_frame->n_vectors);