+/* Decrypt entry point for non-chained (single-buffer) chacha20-poly1305 ops:
+ * forwards to the common handler with IMB_DIR_DECRYPT.  Returns the number
+ * of ops that completed successfully. */
+static_always_inline u32
+ipsecmb_ops_chacha_poly_dec (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+ u32 n_ops)
+{
+ return ipsecmb_ops_chacha_poly (vm, ops, n_ops, IMB_DIR_DECRYPT);
+}
+
+/* Process a batch of chained-buffer (scatter/gather) chacha20-poly1305 AEAD
+ * operations through the intel-ipsec-mb direct API, in the given direction.
+ *
+ * Each op's payload is described by op->n_chunks entries starting at
+ * chunks[op->chunk_index].  On encrypt, the tag is written to op->tag; on
+ * decrypt, the computed tag is compared against op->tag and a mismatch marks
+ * the op VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC.
+ *
+ * Returns the number of successfully completed ops (n_ops - n_fail). */
+static_always_inline u32
+ipsecmb_ops_chacha_poly_chained (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+ vnet_crypto_op_chunk_t *chunks, u32 n_ops,
+ IMB_CIPHER_DIRECTION dir)
+{
+ ipsecmb_main_t *imbm = &ipsecmb_main;
+ ipsecmb_per_thread_data_t *ptd =
+ vec_elt_at_index (imbm->per_thread_data, vm->thread_index);
+ MB_MGR *m = ptd->mgr;
+ u32 i, n_fail = 0, last_key_index = ~0;
+ u8 iv_data[16];
+ u8 *key = 0;
+
+ if (dir == IMB_DIR_ENCRYPT)
+ {
+ for (i = 0; i < n_ops; i++)
+ {
+ vnet_crypto_op_t *op = ops[i];
+ struct chacha20_poly1305_context_data ctx;
+ vnet_crypto_op_chunk_t *chp;
+ __m128i iv;
+ u32 j;
+
+ ASSERT (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS);
+
+ /* Cache the key pointer across consecutive ops that share a
+ * key_index, avoiding a lookup per op. */
+ if (last_key_index != op->key_index)
+ {
+ vnet_crypto_key_t *kd = vnet_crypto_get_key (op->key_index);
+
+ key = kd->data;
+ last_key_index = op->key_index;
+ }
+
+ if (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV)
+ {
+ /* Generate a fresh 96-bit IV from the per-thread state: emit
+ * the low 12 bytes of cbc_iv, then advance the state by
+ * AES-encrypting it with itself so successive IVs differ. */
+ iv = ptd->cbc_iv;
+ _mm_storeu_si128 ((__m128i *) iv_data, iv);
+ clib_memcpy_fast (op->iv, iv_data, 12);
+ ptd->cbc_iv = _mm_aesenc_si128 (iv, iv);
+ }
+
+ IMB_CHACHA20_POLY1305_INIT (m, key, &ctx, op->iv, op->aad,
+ op->aad_len);
+
+ /* Feed each chunk of the scatter/gather list through the
+ * incremental encrypt API. */
+ chp = chunks + op->chunk_index;
+ for (j = 0; j < op->n_chunks; j++)
+ {
+ IMB_CHACHA20_POLY1305_ENC_UPDATE (m, key, &ctx, chp->dst,
+ chp->src, chp->len);
+ chp += 1;
+ }
+
+ IMB_CHACHA20_POLY1305_ENC_FINALIZE (m, &ctx, op->tag, op->tag_len);
+
+ op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
+ }
+ }
+ else /* dir == IMB_DIR_DECRYPT */
+ {
+ for (i = 0; i < n_ops; i++)
+ {
+ vnet_crypto_op_t *op = ops[i];
+ struct chacha20_poly1305_context_data ctx;
+ vnet_crypto_op_chunk_t *chp;
+ u8 scratch[16];
+ u32 j;
+
+ ASSERT (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS);
+
+ /* Same per-batch key caching as the encrypt path. */
+ if (last_key_index != op->key_index)
+ {
+ vnet_crypto_key_t *kd = vnet_crypto_get_key (op->key_index);
+
+ key = kd->data;
+ last_key_index = op->key_index;
+ }
+
+ IMB_CHACHA20_POLY1305_INIT (m, key, &ctx, op->iv, op->aad,
+ op->aad_len);
+
+ chp = chunks + op->chunk_index;
+ for (j = 0; j < op->n_chunks; j++)
+ {
+ IMB_CHACHA20_POLY1305_DEC_UPDATE (m, key, &ctx, chp->dst,
+ chp->src, chp->len);
+ chp += 1;
+ }
+
+ /* Compute the tag into scratch and verify against the one the
+ * sender supplied. */
+ IMB_CHACHA20_POLY1305_DEC_FINALIZE (m, &ctx, scratch, op->tag_len);
+
+ /* NOTE(review): memcmp is not constant-time; a timing-safe
+ * compare is generally preferred for AEAD tag verification —
+ * confirm whether the project has a helper for this. */
+ if (memcmp (op->tag, scratch, op->tag_len))
+ {
+ n_fail = n_fail + 1;
+ op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
+ }
+ else
+ op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
+ }
+ }
+
+ return n_ops - n_fail;
+}
+
+/* Encrypt entry point for chained-buffer chacha20-poly1305 ops.
+ * NOTE(review): prefix "ipsec_mb_" differs from the "ipsecmb_" prefix used
+ * by the other handlers — kept as-is since registration code references it. */
+static_always_inline u32
+ipsec_mb_ops_chacha_poly_enc_chained (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+ vnet_crypto_op_chunk_t *chunks,
+ u32 n_ops)
+{
+ return ipsecmb_ops_chacha_poly_chained (vm, ops, chunks, n_ops,
+ IMB_DIR_ENCRYPT);
+}
+
+/* Decrypt entry point for chained-buffer chacha20-poly1305 ops; forwards to
+ * the common chained handler with IMB_DIR_DECRYPT. */
+static_always_inline u32
+ipsec_mb_ops_chacha_poly_dec_chained (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+ vnet_crypto_op_chunk_t *chunks,
+ u32 n_ops)
+{
+ return ipsecmb_ops_chacha_poly_chained (vm, ops, chunks, n_ops,
+ IMB_DIR_DECRYPT);
+}