+          op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
+        }
+    }
+  return n_ops - n_fail;
+}
+
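+/* AES-GCM decrypt: thin wrapper selecting the GCM flavour of the
+   shared AEAD path */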
+static_always_inline u32
+openssl_ops_dec_gcm (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+                     vnet_crypto_op_chunk_t *chunks, u32 n_ops,
+                     const EVP_CIPHER *cipher, const int iv_len)
+{
+  return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
+                               /* is_gcm */ 1, iv_len);
+}
+
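+/* ChaCha20-Poly1305 decrypt: same AEAD path with is_gcm = 0; the
+   __clib_unused attribute silences the unused-function warning on
+   builds where no caller references it */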
+static_always_inline __clib_unused u32
+openssl_ops_dec_chacha20_poly1305 (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+                                   vnet_crypto_op_chunk_t *chunks, u32 n_ops,
+                                   const EVP_CIPHER *cipher, const int iv_len)
+{
+  return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
+                               /* is_gcm */ 0, iv_len);
+}
+
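+/* Compute a plain digest for each op on the per-thread EVP_MD_CTX,
+   so no context is allocated per operation */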
+static_always_inline u32
+openssl_ops_hash (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+                  vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_MD *md)
+{
+  openssl_per_thread_data_t *ptd =
+    vec_elt_at_index (per_thread_data, vm->thread_index);
+  EVP_MD_CTX *ctx = ptd->hash_ctx;
+  vnet_crypto_op_chunk_t *chp;
+  u32 md_len, i, j, n_fail = 0;
+
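+  /* digest each op in turn; the context is reused across iterations */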
+  for (i = 0; i < n_ops; i++)
+    {
+      vnet_crypto_op_t *op = ops[i];
+
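+      /* EVP_DigestInit_ex resets the context, making reuse across ops safe */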
+      EVP_DigestInit_ex (ctx, md, NULL);
+      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
+        {
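+          /* chained buffers: feed every chunk to the digest in order */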
+          chp = chunks + op->chunk_index;
+          for (j = 0; j < op->n_chunks; j++)
+            {
+              EVP_DigestUpdate (ctx, chp->src, chp->len);
+              chp += 1;
+            }