/* Per-worker-thread state for the Intel IPsec-MB crypto engine. */
typedef struct
{
+ CLIB_CACHE_LINE_ALIGN_MARK (cacheline0); /* cache-line align entries so adjacent workers do not false-share */
MB_MGR *mgr; /* this thread's private IPsec-MB multi-buffer job manager */
__m128i cbc_iv; /* presumably scratch IV state for CBC-mode ops -- not used in this chunk; confirm against full file */
} ipsecmb_per_thread_data_t;
#define _(a, b) \
static_always_inline u32 \
+ipsecmb_ops_gcm_cipher_enc_##a##_chained (vlib_main_t * vm, \
+ vnet_crypto_op_t * ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
+{ \
+ ipsecmb_main_t *imbm = &ipsecmb_main; \
+ ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data, \
+ vm->thread_index); \
+ MB_MGR *m = ptd->mgr; \
+ vnet_crypto_op_chunk_t *chp; \
+ u32 i, j; \
+ \
+ for (i = 0; i < n_ops; i++) \
+ { \
+ struct gcm_key_data *kd; \
+ struct gcm_context_data ctx; \
+ vnet_crypto_op_t *op = ops[i]; \
+ \
+ kd = (struct gcm_key_data *) imbm->key_data[op->key_index]; \
+ ASSERT (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS); \
+ IMB_AES##b##_GCM_INIT(m, kd, &ctx, op->iv, op->aad, op->aad_len); \
+ chp = chunks + op->chunk_index; \
+ for (j = 0; j < op->n_chunks; j++) \
+ { \
+ IMB_AES##b##_GCM_ENC_UPDATE (m, kd, &ctx, chp->dst, chp->src, \
+ chp->len); \
+ chp += 1; \
+ } \
+ IMB_AES##b##_GCM_ENC_FINALIZE(m, kd, &ctx, op->tag, op->tag_len); \
+ \
+ op->status = VNET_CRYPTO_OP_STATUS_COMPLETED; \
+ } \
+ \
+ return n_ops; \
+} \
+ \
+static_always_inline u32 \
ipsecmb_ops_gcm_cipher_enc_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
u32 n_ops) \
{ \
vnet_crypto_op_t *op = ops[i]; \
\
kd = (struct gcm_key_data *) imbm->key_data[op->key_index]; \
- IMB_AES##b##_GCM_INIT(m, kd, &ctx, op->iv, op->aad, op->aad_len); \
- IMB_AES##b##_GCM_ENC_UPDATE(m, kd, &ctx, op->dst, op->src, op->len); \
- IMB_AES##b##_GCM_ENC_FINALIZE(m, kd, &ctx, op->tag, op->tag_len); \
+ IMB_AES##b##_GCM_ENC (m, kd, &ctx, op->dst, op->src, op->len, op->iv, \
+ op->aad, op->aad_len, op->tag, op->tag_len); \
\
op->status = VNET_CRYPTO_OP_STATUS_COMPLETED; \
} \
} \
\
+/* Chained-buffer AES-GCM decrypt + tag verification.  Mirrors the \
+ * encrypt handler but finalizes the computed tag into a local scratch \
+ * buffer and compares it against the tag carried in the op.  Failed \
+ * ops are marked FAIL_BAD_HMAC; returns the number of ops that \
+ * authenticated successfully (n_ops - n_failed). */ \
static_always_inline u32 \
+ipsecmb_ops_gcm_cipher_dec_##a##_chained (vlib_main_t * vm, \
+ vnet_crypto_op_t * ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
+{ \
+ ipsecmb_main_t *imbm = &ipsecmb_main; \
+ ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data, \
+ vm->thread_index); \
+ MB_MGR *m = ptd->mgr; \
+ vnet_crypto_op_chunk_t *chp; \
+ u32 i, j, n_failed = 0; \
+ \
+ for (i = 0; i < n_ops; i++) \
+ { \
+ struct gcm_key_data *kd; \
+ struct gcm_context_data ctx; \
+ vnet_crypto_op_t *op = ops[i]; \
+ /* computed tag is written here, then compared with op->tag */ \
+ u8 scratch[64]; \
+ \
+ kd = (struct gcm_key_data *) imbm->key_data[op->key_index]; \
+ ASSERT (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS); \
+ IMB_AES##b##_GCM_INIT(m, kd, &ctx, op->iv, op->aad, op->aad_len); \
+ chp = chunks + op->chunk_index; \
+ /* decrypt each chunk of the chained payload in order */ \
+ for (j = 0; j < op->n_chunks; j++) \
+ { \
+ IMB_AES##b##_GCM_DEC_UPDATE (m, kd, &ctx, chp->dst, chp->src, \
+ chp->len); \
+ chp += 1; \
+ } \
+ IMB_AES##b##_GCM_DEC_FINALIZE(m, kd, &ctx, scratch, op->tag_len); \
+ \
+ /* NOTE(review): memcmp is not constant-time; a timing-safe compare \
+ * is usually preferred for tag verification -- confirm policy. */ \
+ if ((memcmp (op->tag, scratch, op->tag_len))) \
+ { \
+ op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC; \
+ n_failed++; \
+ } \
+ else \
+ op->status = VNET_CRYPTO_OP_STATUS_COMPLETED; \
+ } \
+ \
+ return n_ops - n_failed; \
+} \
+ \
+static_always_inline u32 \
ipsecmb_ops_gcm_cipher_dec_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
u32 n_ops) \
{ \
u8 scratch[64]; \
\
kd = (struct gcm_key_data *) imbm->key_data[op->key_index]; \
- IMB_AES##b##_GCM_INIT(m, kd, &ctx, op->iv, op->aad, op->aad_len); \
- IMB_AES##b##_GCM_DEC_UPDATE(m, kd, &ctx, op->dst, op->src, op->len); \
- IMB_AES##b##_GCM_DEC_FINALIZE(m, kd, &ctx, scratch, op->tag_len); \
+ IMB_AES##b##_GCM_DEC (m, kd, &ctx, op->dst, op->src, op->len, op->iv, \
+ op->aad, op->aad_len, scratch, op->tag_len); \
\
if ((memcmp (op->tag, scratch, op->tag_len))) \
{ \
clib_mem_free_s (imbm->key_data[idx]);
}
+ /* TODO: add linked alg support */
+ if (key->type == VNET_CRYPTO_KEY_TYPE_LINK)
+ return;
+
kd = imbm->key_data[idx] = clib_mem_alloc_aligned (ad->data_size,
CLIB_CACHE_LINE_BYTES);
IMB_VERSION_STR, 0);
eidx = vnet_crypto_register_engine (vm, "ipsecmb", 80, (char *) name);
- vec_validate (imbm->per_thread_data, tm->n_vlib_mains - 1);
+ vec_validate_aligned (imbm->per_thread_data, tm->n_vlib_mains - 1,
+ CLIB_CACHE_LINE_BYTES);
/* *INDENT-OFF* */
vec_foreach (ptd, imbm->per_thread_data)
ipsecmb_ops_gcm_cipher_enc_##a); \
vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
ipsecmb_ops_gcm_cipher_dec_##a); \
+ vnet_crypto_register_chained_ops_handler \
+ (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
+ ipsecmb_ops_gcm_cipher_enc_##a##_chained); \
+ vnet_crypto_register_chained_ops_handler \
+ (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
+ ipsecmb_ops_gcm_cipher_dec_##a##_chained); \
ad = imbm->alg_data + VNET_CRYPTO_ALG_##a; \
ad->data_size = sizeof (struct gcm_key_data); \
ad->aes_gcm_pre = m->gcm##b##_pre; \