+ipsecmb_ops_gcm_cipher_dec_##a##_chained (vlib_main_t * vm, \
+ vnet_crypto_op_t * ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
+{ \
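+ /* Macro parameters: a is the alg name pasted into the function name, \
+ b is the AES key size (128/192/256) selecting the IMB_AES<b>_GCM_* \
+ entry points. */ \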
+ ipsecmb_main_t *imbm = &ipsecmb_main; \
+ ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data, \
+ vm->thread_index); \
+ MB_MGR *m = ptd->mgr; \
+ vnet_crypto_op_chunk_t *chp; \
+ u32 i, j, n_failed = 0; \
+ \
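+ /* Process each op independently; a bad tag on one op does not abort \
+ the rest of the batch. */ \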
+ for (i = 0; i < n_ops; i++) \
+ { \
+ struct gcm_key_data *kd; \
+ struct gcm_context_data ctx; \
+ vnet_crypto_op_t *op = ops[i]; \
+ u8 scratch[64]; \
+ \
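+ /* Fetch the expanded GCM key data for this op's key index. */ \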
+ kd = (struct gcm_key_data *) imbm->key_data[op->key_index]; \
+ ASSERT (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS); \
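+ /* Begin a fresh GCM operation with this op's IV and AAD. */ \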
+ IMB_AES##b##_GCM_INIT (m, kd, &ctx, op->iv, op->aad, op->aad_len); \
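+ /* Walk the scatter-gather chunk list, decrypting chunk by chunk. */ \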
+ chp = chunks + op->chunk_index; \
+ for (j = 0; j < op->n_chunks; j++) \
+ { \
+ IMB_AES##b##_GCM_DEC_UPDATE (m, kd, &ctx, chp->dst, chp->src, \
+ chp->len); \
+ chp += 1; \
+ } \
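+ /* Finalize computes the expected authentication tag into scratch. */ \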
+ IMB_AES##b##_GCM_DEC_FINALIZE (m, kd, &ctx, scratch, op->tag_len); \
+ \
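+ /* Reject the op if the computed tag does not match the received one. */ \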
+ if (memcmp (op->tag, scratch, op->tag_len)) \
+ { \
+ op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC; \
+ n_failed++; \
+ } \
+ else \
+ op->status = VNET_CRYPTO_OP_STATUS_COMPLETED; \
+ } \
+ \
+ return n_ops - n_failed; \
+} \
+ \
+static_always_inline u32 \