X-Git-Url: https://gerrit.fd.io/r/gitweb?a=blobdiff_plain;f=src%2Fplugins%2Fcrypto_ipsecmb%2Fipsecmb.c;h=93654daee510742976d3033ea50e9a5dbf2d3286;hb=92d296551bdc30900fcffcd9f837746b5c829e48;hp=0501dcccb08c41202db09d61899bc824c12fed62;hpb=0c65f52bb9395526613493aa9c042ea4f6dbc1fc;p=vpp.git

diff --git a/src/plugins/crypto_ipsecmb/ipsecmb.c b/src/plugins/crypto_ipsecmb/ipsecmb.c
index 0501dcccb08..93654daee51 100644
--- a/src/plugins/crypto_ipsecmb/ipsecmb.c
+++ b/src/plugins/crypto_ipsecmb/ipsecmb.c
@@ -56,7 +56,7 @@ typedef struct
 {
   u8 enc_key_exp[EXPANDED_KEY_N_BYTES];
   u8 dec_key_exp[EXPANDED_KEY_N_BYTES];
-} ipsecmb_aes_cbc_key_data_t;
+} ipsecmb_aes_key_data_t;
 
 static ipsecmb_main_t ipsecmb_main = { };
 
@@ -71,12 +71,15 @@ static ipsecmb_main_t ipsecmb_main = { };
   _(SHA512, SHA_512, sha512, 128, 64, 64)
 
 /*
- * (Alg, key-len-bits)
+ * (Alg, key-len-bits, JOB_CIPHER_MODE)
  */
-#define foreach_ipsecmb_cbc_cipher_op \
-  _(AES_128_CBC, 128) \
-  _(AES_192_CBC, 192) \
-  _(AES_256_CBC, 256)
+#define foreach_ipsecmb_cipher_op \
+  _ (AES_128_CBC, 128, CBC) \
+  _ (AES_192_CBC, 192, CBC) \
+  _ (AES_256_CBC, 256, CBC) \
+  _ (AES_128_CTR, 128, CNTR) \
+  _ (AES_192_CTR, 192, CNTR) \
+  _ (AES_256_CTR, 256, CNTR)
 
 /*
  * (Alg, key-len-bytes, iv-len-bytes)
@@ -86,15 +89,35 @@ static ipsecmb_main_t ipsecmb_main = { };
   _(AES_192_GCM, 192) \
   _(AES_256_GCM, 256)
 
+static_always_inline vnet_crypto_op_status_t
+ipsecmb_status_job (JOB_STS status)
+{
+  switch (status)
+    {
+    case STS_COMPLETED:
+      return VNET_CRYPTO_OP_STATUS_COMPLETED;
+    case STS_BEING_PROCESSED:
+    case STS_COMPLETED_AES:
+    case STS_COMPLETED_HMAC:
+      return VNET_CRYPTO_OP_STATUS_WORK_IN_PROGRESS;
+    case STS_INVALID_ARGS:
+    case STS_INTERNAL_ERROR:
+    case STS_ERROR:
+      return VNET_CRYPTO_OP_STATUS_FAIL_ENGINE_ERR;
+    }
+  ASSERT (0);
+  return VNET_CRYPTO_OP_STATUS_FAIL_ENGINE_ERR;
+}
+
 always_inline void
 ipsecmb_retire_hmac_job (JOB_AES_HMAC * job, u32 * n_fail, u32 digest_size)
 {
   vnet_crypto_op_t *op = job->user_data;
   u32 len = op->digest_len ? op->digest_len : digest_size;
 
-  if (STS_COMPLETED != job->status)
+  if (PREDICT_FALSE (STS_COMPLETED != job->status))
     {
-      op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
+      op->status = ipsecmb_status_job (job->status);
       *n_fail = *n_fail + 1;
       return;
     }
@@ -180,9 +203,9 @@ ipsecmb_retire_cipher_job (JOB_AES_HMAC * job, u32 * n_fail)
 {
   vnet_crypto_op_t *op = job->user_data;
 
-  if (STS_COMPLETED != job->status)
+  if (PREDICT_FALSE (STS_COMPLETED != job->status))
     {
-      op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
+      op->status = ipsecmb_status_job (job->status);
       *n_fail = *n_fail + 1;
     }
   else
@@ -190,9 +213,10 @@ ipsecmb_retire_cipher_job (JOB_AES_HMAC * job, u32 * n_fail)
 }
 
 static_always_inline u32
-ipsecmb_ops_cbc_cipher_inline (vlib_main_t * vm, vnet_crypto_op_t * ops[],
+ipsecmb_ops_aes_cipher_inline (vlib_main_t *vm, vnet_crypto_op_t *ops[],
                                u32 n_ops, u32 key_len,
-                               JOB_CIPHER_DIRECTION direction)
+                               JOB_CIPHER_DIRECTION direction,
+                               JOB_CIPHER_MODE cipher_mode)
 {
   ipsecmb_main_t *imbm = &ipsecmb_main;
   ipsecmb_per_thread_data_t *ptd = vec_elt_at_index (imbm->per_thread_data,
@@ -202,9 +226,9 @@ ipsecmb_ops_cbc_cipher_inline (vlib_main_t * vm, vnet_crypto_op_t * ops[],
 
   for (i = 0; i < n_ops; i++)
     {
-      ipsecmb_aes_cbc_key_data_t *kd;
+      ipsecmb_aes_key_data_t *kd;
       vnet_crypto_op_t *op = ops[i];
-      kd = (ipsecmb_aes_cbc_key_data_t *) imbm->key_data[op->key_index];
+      kd = (ipsecmb_aes_key_data_t *) imbm->key_data[op->key_index];
       __m128i iv;
 
       job = IMB_GET_NEXT_JOB (ptd->mgr);
@@ -215,7 +239,7 @@ ipsecmb_ops_cbc_cipher_inline (vlib_main_t * vm, vnet_crypto_op_t * ops[],
       job->cipher_start_src_offset_in_bytes = 0;
 
       job->hash_alg = NULL_HASH;
-      job->cipher_mode = CBC;
+      job->cipher_mode = cipher_mode;
       job->cipher_direction = direction;
       job->chain_order = (direction == ENCRYPT ?
                           CIPHER_HASH : HASH_CIPHER);
@@ -246,20 +270,20 @@ ipsecmb_ops_cbc_cipher_inline (vlib_main_t * vm, vnet_crypto_op_t * ops[],
   return n_ops - n_fail;
 }
 
-#define _(a, b) \
-static_always_inline u32 \
-ipsecmb_ops_cbc_cipher_enc_##a (vlib_main_t * vm, \
-                                vnet_crypto_op_t * ops[], \
-                                u32 n_ops) \
-{ return ipsecmb_ops_cbc_cipher_inline (vm, ops, n_ops, b, ENCRYPT); } \
- \
-static_always_inline u32 \
-ipsecmb_ops_cbc_cipher_dec_##a (vlib_main_t * vm, \
-                                vnet_crypto_op_t * ops[], \
-                                u32 n_ops) \
-{ return ipsecmb_ops_cbc_cipher_inline (vm, ops, n_ops, b, DECRYPT); } \
+#define _(a, b, c) \
+  static_always_inline u32 ipsecmb_ops_cipher_enc_##a ( \
+    vlib_main_t *vm, vnet_crypto_op_t *ops[], u32 n_ops) \
+  { \
+    return ipsecmb_ops_aes_cipher_inline (vm, ops, n_ops, b, ENCRYPT, c); \
+  } \
+ \
+  static_always_inline u32 ipsecmb_ops_cipher_dec_##a ( \
+    vlib_main_t *vm, vnet_crypto_op_t *ops[], u32 n_ops) \
+  { \
+    return ipsecmb_ops_aes_cipher_inline (vm, ops, n_ops, b, DECRYPT, c); \
+  }
 
-foreach_ipsecmb_cbc_cipher_op;
+foreach_ipsecmb_cipher_op;
 #undef _
 
 #define _(a, b) \
@@ -402,6 +426,245 @@ ipsecmb_ops_gcm_cipher_dec_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
 foreach_ipsecmb_gcm_cipher_op;
 #undef _
 
+#ifdef HAVE_IPSECMB_CHACHA_POLY
+always_inline void
+ipsecmb_retire_aead_job (JOB_AES_HMAC *job, u32 *n_fail)
+{
+  vnet_crypto_op_t *op = job->user_data;
+  u32 len = op->tag_len;
+
+  if (PREDICT_FALSE (STS_COMPLETED != job->status))
+    {
+      op->status = ipsecmb_status_job (job->status);
+      *n_fail = *n_fail + 1;
+      return;
+    }
+
+  if (op->flags & VNET_CRYPTO_OP_FLAG_HMAC_CHECK)
+    {
+      if (memcmp (op->tag, job->auth_tag_output, len))
+        {
+          *n_fail = *n_fail + 1;
+          op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
+          return;
+        }
+    }
+
+  clib_memcpy_fast (op->tag, job->auth_tag_output, len);
+
+  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
+}
+
+static_always_inline u32
+ipsecmb_ops_chacha_poly (vlib_main_t *vm, vnet_crypto_op_t *ops[], u32 n_ops,
+                         IMB_CIPHER_DIRECTION dir)
+{
+  ipsecmb_main_t *imbm = &ipsecmb_main;
+  ipsecmb_per_thread_data_t *ptd =
+    vec_elt_at_index (imbm->per_thread_data, vm->thread_index);
+  struct IMB_JOB *job;
+  MB_MGR *m = ptd->mgr;
+  u32 i, n_fail = 0, last_key_index = ~0;
+  u8 scratch[VLIB_FRAME_SIZE][16];
+  u8 iv_data[16];
+  u8 *key = 0;
+
+  for (i = 0; i < n_ops; i++)
+    {
+      vnet_crypto_op_t *op = ops[i];
+      __m128i iv;
+
+      job = IMB_GET_NEXT_JOB (m);
+      if (last_key_index != op->key_index)
+        {
+          vnet_crypto_key_t *kd = vnet_crypto_get_key (op->key_index);
+
+          key = kd->data;
+          last_key_index = op->key_index;
+        }
+
+      job->cipher_direction = dir;
+      job->chain_order = IMB_ORDER_HASH_CIPHER;
+      job->cipher_mode = IMB_CIPHER_CHACHA20_POLY1305;
+      job->hash_alg = IMB_AUTH_CHACHA20_POLY1305;
+      job->enc_keys = job->dec_keys = key;
+      job->key_len_in_bytes = 32;
+
+      job->u.CHACHA20_POLY1305.aad = op->aad;
+      job->u.CHACHA20_POLY1305.aad_len_in_bytes = op->aad_len;
+      job->src = op->src;
+      job->dst = op->dst;
+
+      if ((dir == IMB_DIR_ENCRYPT) &&
+          (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV))
+        {
+          iv = ptd->cbc_iv;
+          _mm_storeu_si128 ((__m128i *) iv_data, iv);
+          clib_memcpy_fast (op->iv, iv_data, 12);
+          ptd->cbc_iv = _mm_aesenc_si128 (iv, iv);
+        }
+
+      job->iv = op->iv;
+      job->iv_len_in_bytes = 12;
+      job->msg_len_to_cipher_in_bytes = job->msg_len_to_hash_in_bytes =
+        op->len;
+      job->cipher_start_src_offset_in_bytes =
+        job->hash_start_src_offset_in_bytes = 0;
+
+      job->auth_tag_output = scratch[i];
+      job->auth_tag_output_len_in_bytes = 16;
+
+      job->user_data = op;
+
+      job = IMB_SUBMIT_JOB_NOCHECK (ptd->mgr);
+      if (job)
+        ipsecmb_retire_aead_job (job, &n_fail);
+
+      op++;
+    }
+
+  while ((job = IMB_FLUSH_JOB (ptd->mgr)))
+    ipsecmb_retire_aead_job (job, &n_fail);
+
+  return n_ops - n_fail;
+}
+
+static_always_inline u32
+ipsecmb_ops_chacha_poly_enc (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+                             u32 n_ops)
+{
+  return ipsecmb_ops_chacha_poly (vm, ops, n_ops, IMB_DIR_ENCRYPT);
+}
+
+static_always_inline u32
+ipsecmb_ops_chacha_poly_dec (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+                             u32 n_ops)
+{
+  return ipsecmb_ops_chacha_poly (vm, ops, n_ops, IMB_DIR_DECRYPT);
+}
+
+static_always_inline u32
+ipsecmb_ops_chacha_poly_chained (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+                                 vnet_crypto_op_chunk_t *chunks, u32 n_ops,
+                                 IMB_CIPHER_DIRECTION dir)
+{
+  ipsecmb_main_t *imbm = &ipsecmb_main;
+  ipsecmb_per_thread_data_t *ptd =
+    vec_elt_at_index (imbm->per_thread_data, vm->thread_index);
+  MB_MGR *m = ptd->mgr;
+  u32 i, n_fail = 0, last_key_index = ~0;
+  u8 iv_data[16];
+  u8 *key = 0;
+
+  if (dir == IMB_DIR_ENCRYPT)
+    {
+      for (i = 0; i < n_ops; i++)
+        {
+          vnet_crypto_op_t *op = ops[i];
+          struct chacha20_poly1305_context_data ctx;
+          vnet_crypto_op_chunk_t *chp;
+          __m128i iv;
+          u32 j;
+
+          ASSERT (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS);
+
+          if (last_key_index != op->key_index)
+            {
+              vnet_crypto_key_t *kd = vnet_crypto_get_key (op->key_index);
+
+              key = kd->data;
+              last_key_index = op->key_index;
+            }
+
+          if (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV)
+            {
+              iv = ptd->cbc_iv;
+              _mm_storeu_si128 ((__m128i *) iv_data, iv);
+              clib_memcpy_fast (op->iv, iv_data, 12);
+              ptd->cbc_iv = _mm_aesenc_si128 (iv, iv);
+            }
+
+          IMB_CHACHA20_POLY1305_INIT (m, key, &ctx, op->iv, op->aad,
+                                      op->aad_len);
+
+          chp = chunks + op->chunk_index;
+          for (j = 0; j < op->n_chunks; j++)
+            {
+              IMB_CHACHA20_POLY1305_ENC_UPDATE (m, key, &ctx, chp->dst,
+                                                chp->src, chp->len);
+              chp += 1;
+            }
+
+          IMB_CHACHA20_POLY1305_ENC_FINALIZE (m, &ctx, op->tag, op->tag_len);
+
+          op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
+        }
+    }
+  else /* dir == IMB_DIR_DECRYPT */
+    {
+      for (i = 0; i < n_ops; i++)
+        {
+          vnet_crypto_op_t *op = ops[i];
+          struct chacha20_poly1305_context_data ctx;
+          vnet_crypto_op_chunk_t *chp;
+          u8 scratch[16];
+          u32 j;
+
+          ASSERT (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS);
+
+          if (last_key_index != op->key_index)
+            {
+              vnet_crypto_key_t *kd = vnet_crypto_get_key (op->key_index);
+
+              key = kd->data;
+              last_key_index = op->key_index;
+            }
+
+          IMB_CHACHA20_POLY1305_INIT (m, key, &ctx, op->iv, op->aad,
+                                      op->aad_len);
+
+          chp = chunks + op->chunk_index;
+          for (j = 0; j < op->n_chunks; j++)
+            {
+              IMB_CHACHA20_POLY1305_DEC_UPDATE (m, key, &ctx, chp->dst,
+                                                chp->src, chp->len);
+              chp += 1;
+            }
+
+          IMB_CHACHA20_POLY1305_DEC_FINALIZE (m, &ctx, scratch, op->tag_len);
+
+          if (memcmp (op->tag, scratch, op->tag_len))
+            {
+              n_fail = n_fail + 1;
+              op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
+            }
+          else
+            op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
+        }
+    }
+
+  return n_ops - n_fail;
+}
+
+static_always_inline u32
+ipsec_mb_ops_chacha_poly_enc_chained (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+                                      vnet_crypto_op_chunk_t *chunks,
+                                      u32 n_ops)
+{
+  return ipsecmb_ops_chacha_poly_chained (vm, ops, chunks, n_ops,
+                                          IMB_DIR_ENCRYPT);
+}
+
+static_always_inline u32
+ipsec_mb_ops_chacha_poly_dec_chained (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+                                      vnet_crypto_op_chunk_t *chunks,
+                                      u32 n_ops)
+{
+  return ipsecmb_ops_chacha_poly_chained (vm, ops, chunks, n_ops,
+                                          IMB_DIR_DECRYPT);
+}
+#endif
+
 clib_error_t *
 crypto_ipsecmb_iv_init (ipsecmb_main_t * imbm)
 {
@@ -469,8 +732,8 @@ crypto_ipsecmb_key_handler (vlib_main_t * vm, vnet_crypto_key_op_t kop,
   /* AES CBC key expansion */
   if (ad->keyexp)
     {
-      ad->keyexp (key->data, ((ipsecmb_aes_cbc_key_data_t *) kd)->enc_key_exp,
-                  ((ipsecmb_aes_cbc_key_data_t *) kd)->dec_key_exp);
+      ad->keyexp (key->data, ((ipsecmb_aes_key_data_t *) kd)->enc_key_exp,
+                  ((ipsecmb_aes_key_data_t *) kd)->dec_key_exp);
       return;
     }
 
@@ -560,16 +823,16 @@ crypto_ipsecmb_init (vlib_main_t * vm)
   foreach_ipsecmb_hmac_op;
 #undef _
-#define _(a, b) \
-  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
-                                    ipsecmb_ops_cbc_cipher_enc_##a); \
-  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
-                                    ipsecmb_ops_cbc_cipher_dec_##a); \
-  ad = imbm->alg_data + VNET_CRYPTO_ALG_##a; \
-  ad->data_size = sizeof (ipsecmb_aes_cbc_key_data_t); \
-  ad->keyexp = m->keyexp_##b; \
-
-  foreach_ipsecmb_cbc_cipher_op;
+#define _(a, b, c) \
+  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
+                                    ipsecmb_ops_cipher_enc_##a); \
+  vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
+                                    ipsecmb_ops_cipher_dec_##a); \
+  ad = imbm->alg_data + VNET_CRYPTO_ALG_##a; \
+  ad->data_size = sizeof (ipsecmb_aes_key_data_t); \
+  ad->keyexp = m->keyexp_##b;
+
+  foreach_ipsecmb_cipher_op;
 #undef _
 
 #define _(a, b) \
   vnet_crypto_register_ops_handler (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
@@ -589,6 +852,23 @@ crypto_ipsecmb_init (vlib_main_t * vm)
   foreach_ipsecmb_gcm_cipher_op;
 #undef _
 
+#ifdef HAVE_IPSECMB_CHACHA_POLY
+  vnet_crypto_register_ops_handler (vm, eidx,
+                                    VNET_CRYPTO_OP_CHACHA20_POLY1305_ENC,
+                                    ipsecmb_ops_chacha_poly_enc);
+  vnet_crypto_register_ops_handler (vm, eidx,
+                                    VNET_CRYPTO_OP_CHACHA20_POLY1305_DEC,
+                                    ipsecmb_ops_chacha_poly_dec);
+  vnet_crypto_register_chained_ops_handler (
+    vm, eidx, VNET_CRYPTO_OP_CHACHA20_POLY1305_ENC,
+    ipsec_mb_ops_chacha_poly_enc_chained);
+  vnet_crypto_register_chained_ops_handler (
+    vm, eidx, VNET_CRYPTO_OP_CHACHA20_POLY1305_DEC,
+    ipsec_mb_ops_chacha_poly_dec_chained);
+  ad = imbm->alg_data + VNET_CRYPTO_ALG_CHACHA20_POLY1305;
+  ad->data_size = 0;
+#endif
+
   vnet_crypto_register_key_handler (vm, eidx, crypto_ipsecmb_key_handler);
   return (NULL);
 }
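Note (reading aid, not part of the patch): the reworked _(a, b, c) wrapper
macro is instantiated once per entry of foreach_ipsecmb_cipher_op, so the
entry _ (AES_128_CTR, 128, CNTR) expands to roughly the following, plus the
matching ipsecmb_ops_cipher_dec_AES_128_CTR wrapper with DECRYPT:

static_always_inline u32
ipsecmb_ops_cipher_enc_AES_128_CTR (vlib_main_t *vm, vnet_crypto_op_t *ops[],
                                    u32 n_ops)
{
  /* key_len = 128 bits, direction = ENCRYPT, mode = CNTR (AES counter) */
  return ipsecmb_ops_aes_cipher_inline (vm, ops, n_ops, 128, ENCRYPT, CNTR);
}

This is the crux of the change to the AES path: the cipher mode is now a
parameter of the single inline worker instead of a hard-coded CBC, so the
same code serves both the CBC and CTR variants.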
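For readers unfamiliar with the intel-ipsec-mb job API used throughout the
patch: IMB_SUBMIT_JOB_NOCHECK may return NULL (the job was queued but is not
yet complete) or a pointer to some earlier job that has since completed, and
IMB_FLUSH_JOB drains whatever is still in flight. A distilled sketch of the
pattern (illustrative only, with op setup elided; example_submit_loop is a
hypothetical name, the IMB_* calls and retire helper are from the patch):

static u32
example_submit_loop (MB_MGR *mgr, vnet_crypto_op_t *ops[], u32 n_ops)
{
  JOB_AES_HMAC *job;
  u32 i, n_fail = 0;

  for (i = 0; i < n_ops; i++)
    {
      job = IMB_GET_NEXT_JOB (mgr);   /* grab a free job slot */
      /* ... fill cipher/auth parameters from ops[i] ... */
      job->user_data = ops[i];
      /* a non-NULL return is some previously submitted, now finished job */
      job = IMB_SUBMIT_JOB_NOCHECK (mgr);
      if (job)
        ipsecmb_retire_cipher_job (job, &n_fail);
    }

  /* drain jobs still held inside the manager */
  while ((job = IMB_FLUSH_JOB (mgr)))
    ipsecmb_retire_cipher_job (job, &n_fail);

  return n_ops - n_fail;
}

This decoupling of completion from submission is why op->status is written in
the retire helpers, and why the new ipsecmb_status_job helper translates the
library's JOB_STS values into vnet_crypto op status codes in one place.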