+openssl_ops_enc_aead (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+ vnet_crypto_op_chunk_t *chunks, u32 n_ops,
+ const EVP_CIPHER *cipher, int is_gcm, int is_gmac,
+ const int iv_len)
+{
+ openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
+ vm->thread_index);
+ EVP_CIPHER_CTX *ctx;
+ vnet_crypto_op_chunk_t *chp;
+ u32 i, j;
+ for (i = 0; i < n_ops; i++)
+ {
+ vnet_crypto_op_t *op = ops[i];
+ int len = 0;
+
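+ /* the per-thread, per-key encrypt context is pre-configured;
+    only the IV is (re)loaded for each op */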
+ ctx = ptd->evp_cipher_enc_ctx[op->key_index];
+ EVP_EncryptInit_ex (ctx, 0, 0, 0, op->iv);
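+ /* a null output buffer tells OpenSSL this update is AAD
+    (authenticated but not encrypted) */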
+ if (op->aad_len)
+ EVP_EncryptUpdate (ctx, 0, &len, op->aad, op->aad_len);
+ if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
+ {
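+ /* scatter-gather path: run each chunk through the cipher in order */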
+ chp = chunks + op->chunk_index;
+ for (j = 0; j < op->n_chunks; j++)
+ {
+ EVP_EncryptUpdate (ctx, is_gmac ? 0 : chp->dst, &len, chp->src,
+ chp->len);
+ chp += 1;
+ }
+ }
+ else
+ EVP_EncryptUpdate (ctx, is_gmac ? 0 : op->dst, &len, op->src, op->len);
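+ /* finalize and read back the computed authentication tag;
+    in GMAC mode no ciphertext is written, hence the null outputs */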
+ EVP_EncryptFinal_ex (ctx, is_gmac ? 0 : op->dst + len, &len);
+ EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_AEAD_GET_TAG, op->tag_len, op->tag);
+ op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
+ }
+ return n_ops;
+}
+
+static_always_inline u32
+openssl_ops_enc_null_gmac (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+ vnet_crypto_op_chunk_t *chunks, u32 n_ops,
+ const EVP_CIPHER *cipher, const int iv_len)
+{
+ return openssl_ops_enc_aead (vm, ops, chunks, n_ops, cipher,
+ /* is_gcm */ 1, /* is_gmac */ 1, iv_len);
+}
+
+static_always_inline u32
+openssl_ops_enc_gcm (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+ vnet_crypto_op_chunk_t *chunks, u32 n_ops,
+ const EVP_CIPHER *cipher, const int iv_len)
+{
+ return openssl_ops_enc_aead (vm, ops, chunks, n_ops, cipher,
+ /* is_gcm */ 1, /* is_gmac */ 0, iv_len);
+}
+
+static_always_inline __clib_unused u32
+openssl_ops_enc_chacha20_poly1305 (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+ vnet_crypto_op_chunk_t *chunks, u32 n_ops,
+ const EVP_CIPHER *cipher, const int iv_len)
+{
+ return openssl_ops_enc_aead (vm, ops, chunks, n_ops, cipher,
+ /* is_gcm */ 0, /* is_gmac */ 0, iv_len);
+}
+
+static_always_inline u32
+openssl_ops_dec_aead (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+ vnet_crypto_op_chunk_t *chunks, u32 n_ops,
+ const EVP_CIPHER *cipher, int is_gcm, int is_gmac,
+ const int iv_len)
+{
+ openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
+ vm->thread_index);
+ EVP_CIPHER_CTX *ctx;
+ vnet_crypto_op_chunk_t *chp;
+ u32 i, j, n_fail = 0;
+ for (i = 0; i < n_ops; i++)
+ {
+ vnet_crypto_op_t *op = ops[i];
+ int len = 0;
+
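+ /* decrypt mirror of the loop above: pre-configured per-key context,
+    per-op IV */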
+ ctx = ptd->evp_cipher_dec_ctx[op->key_index];
+ EVP_DecryptInit_ex (ctx, 0, 0, 0, op->iv);
+ if (op->aad_len)
+ EVP_DecryptUpdate (ctx, 0, &len, op->aad, op->aad_len);
+ if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
+ {
+ chp = chunks + op->chunk_index;
+ for (j = 0; j < op->n_chunks; j++)
+ {
+ EVP_DecryptUpdate (ctx, is_gmac ? 0 : chp->dst, &len, chp->src,
+ chp->len);
+ chp += 1;
+ }
+ }
+ else
+ {
+ EVP_DecryptUpdate (ctx, is_gmac ? 0 : op->dst, &len, op->src,
+ op->len);
+ }
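+ /* hand OpenSSL the expected tag; it is verified during finalization */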
+ EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_AEAD_SET_TAG, op->tag_len, op->tag);
+
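+ /* a non-positive return means the computed tag did not match */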
+ if (EVP_DecryptFinal_ex (ctx, is_gmac ? 0 : op->dst + len, &len) > 0)
+ op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
+ else
+ {
+ n_fail++;
+ op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
+ }
+ }
+ return n_ops - n_fail;
+}
+
+static_always_inline u32
+openssl_ops_dec_null_gmac (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+ vnet_crypto_op_chunk_t *chunks, u32 n_ops,
+ const EVP_CIPHER *cipher, const int iv_len)
+{
+ return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
+ /* is_gcm */ 1, /* is_gmac */ 1, iv_len);
+}
+
+static_always_inline u32
+openssl_ops_dec_gcm (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+ vnet_crypto_op_chunk_t *chunks, u32 n_ops,
+ const EVP_CIPHER *cipher, const int iv_len)
+{
+ return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
+ /* is_gcm */ 1, /* is_gmac */ 0, iv_len);
+}
+
+static_always_inline __clib_unused u32
+openssl_ops_dec_chacha20_poly1305 (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+ vnet_crypto_op_chunk_t *chunks, u32 n_ops,
+ const EVP_CIPHER *cipher, const int iv_len)
+{
+ return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
+ /* is_gcm */ 0, /* is_gmac */ 0, iv_len);
+}
+
+static_always_inline u32
+openssl_ops_hash (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+ vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_MD *md)
+{
+ openssl_per_thread_data_t *ptd =
+ vec_elt_at_index (per_thread_data, vm->thread_index);
+ EVP_MD_CTX *ctx = ptd->hash_ctx;
+ vnet_crypto_op_chunk_t *chp;
+ u32 md_len, i, j;
+
+ for (i = 0; i < n_ops; i++)
+ {
+ vnet_crypto_op_t *op = ops[i];
+
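+ /* reset the shared per-thread digest context for this op */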
+ EVP_DigestInit_ex (ctx, md, NULL);
+ if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
+ {
+ chp = chunks + op->chunk_index;
+ for (j = 0; j < op->n_chunks; j++)
+ {
+ EVP_DigestUpdate (ctx, chp->src, chp->len);
+ chp += 1;
+ }
+ }
+ else
+ EVP_DigestUpdate (ctx, op->src, op->len);
+
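+ /* write out the digest and record its actual length */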
+ EVP_DigestFinal_ex (ctx, op->digest, &md_len);
+ op->digest_len = md_len;
+ op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
+ }
+ return n_ops;
+}
+
+static_always_inline u32
+openssl_ops_hmac (vlib_main_t * vm, vnet_crypto_op_t * ops[],
+ vnet_crypto_op_chunk_t * chunks, u32 n_ops,