From 659b78d463ac2a46536ff1129abf992db6a80c4a Mon Sep 17 00:00:00 2001
From: Ivan Ivanets
Date: Thu, 1 May 2025 17:32:21 +0300
Subject: [PATCH] crypto: add new handlers for cbc/ctr+hmac

Type: feature

Support combined cipher + auth algorithms (enc/dec and chained ops)
with aes-cbc/ctr for the openssl, native and ipsecmb crypto engines.

Change-Id: I4c970c57c3fce1e78fe25ea3d4a9c1de3b4de0c0
Signed-off-by: Ivan Ivanets
---
 src/crypto_engines/ipsecmb/ipsecmb.c | 96 +++++++++++++++++++++++++++++++++-
 src/crypto_engines/native/aes_cbc.c  | 48 ++++++++++++++++-
 src/crypto_engines/native/aes_ctr.c  | 50 +++++++++++++++++--
 src/crypto_engines/native/sha2.c     | 68 +-----------------------
 src/crypto_engines/native/sha2.h     | 87 ++++++++++++++++++++++++++++++
 src/crypto_engines/openssl/main.c    | 20 ++++++--
 6 files changed, 292 insertions(+), 77 deletions(-)
 create mode 100644 src/crypto_engines/native/sha2.h

diff --git a/src/crypto_engines/ipsecmb/ipsecmb.c b/src/crypto_engines/ipsecmb/ipsecmb.c
index 3006c5294d5..92b5e9c278a 100644
--- a/src/crypto_engines/ipsecmb/ipsecmb.c
+++ b/src/crypto_engines/ipsecmb/ipsecmb.c
@@ -85,6 +85,17 @@ static ipsecmb_main_t ipsecmb_main = {};
   _ (AES_256_GCM_TAG16_AAD8, 256, 1, 8)                                      \
   _ (AES_256_GCM_TAG16_AAD12, 256, 1, 12)
 
+#define foreach_ipsecmb_linked_cipher_auth_op                                \
+  _ (AES_128_CBC_SHA1_TAG12, 128, SHA_1, CBC, 12)                            \
+  _ (AES_192_CBC_SHA1_TAG12, 192, SHA_1, CBC, 12)                            \
+  _ (AES_256_CBC_SHA1_TAG12, 256, SHA_1, CBC, 12)                            \
+  _ (AES_128_CTR_SHA1_TAG12, 128, SHA_1, CNTR, 12)                           \
+  _ (AES_192_CTR_SHA1_TAG12, 192, SHA_1, CNTR, 12)                           \
+  _ (AES_256_CTR_SHA1_TAG12, 256, SHA_1, CNTR, 12)                           \
+  _ (AES_128_CTR_SHA256_TAG16, 128, SHA_256, CNTR, 16)                       \
+  _ (AES_192_CTR_SHA256_TAG16, 192, SHA_256, CNTR, 16)                       \
+  _ (AES_256_CTR_SHA256_TAG16, 256, SHA_256, CNTR, 16)
+
 #define foreach_chacha_poly_fixed_aad_lengths _ (0) _ (8) _ (12)
 
 static_always_inline vnet_crypto_op_status_t
@@ -519,9 +530,83 @@ get_mgr (vlib_main_t *vm)
 foreach_ipsecmb_gcm_cipher_op;
 #undef _
 
+static_always_inline u32
+ipsecmb_ops_cipher_auth_inline (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+                                u32 n_ops, u32 key_len, u32 tag_len,
+                                IMB_CIPHER_MODE cipher_mode,
+                                IMB_HASH_ALG hash_alg,
+                                IMB_CIPHER_DIRECTION direction)
+{
+  ipsecmb_main_t *imbm = &ipsecmb_main;
+  ipsecmb_per_thread_data_t *ptd = imbm->per_thread_data + vm->thread_index;
+  IMB_JOB *job;
+  u32 i, n_fail = 0;
+
+  for (i = 0; i < n_ops; i++)
+    {
+      ipsecmb_aes_key_data_t *kd;
+      vnet_crypto_op_t *op = ops[i];
+      kd = (ipsecmb_aes_key_data_t *) imbm->key_data[op->key_index];
+
+      job = IMB_GET_NEXT_JOB (ptd->mgr);
+
+      job->src = op->src;
+      job->dst = op->dst;
+      job->msg_len_to_cipher_in_bytes = op->len;
+      job->cipher_start_src_offset_in_bytes = 0;
+
+      job->cipher_mode = cipher_mode;
+      job->cipher_direction = direction;
+      job->hash_alg = hash_alg;
+
+      job->key_len_in_bytes = key_len / 8;
+      job->enc_keys = kd->enc_key_exp;
+      job->dec_keys = kd->dec_key_exp;
+      job->iv = op->iv;
+      job->iv_len_in_bytes = IMB_AES_BLOCK_SIZE;
+
+      job->auth_tag_output = op->digest;
+      job->auth_tag_output_len_in_bytes = tag_len;
+
+      job->chain_order = (direction == IMB_DIR_ENCRYPT) ?
+                           IMB_ORDER_CIPHER_HASH :
+                           IMB_ORDER_HASH_CIPHER;
+
+      job->user_data = op;
+
+      job = IMB_SUBMIT_JOB (ptd->mgr);
+      if (job)
+        ipsecmb_retire_cipher_job (job, &n_fail);
+    }
+
+  while ((job = IMB_FLUSH_JOB (ptd->mgr)))
+    ipsecmb_retire_cipher_job (job, &n_fail);
+
+  return n_ops - n_fail;
+}
+
+#define _(n, k, h, c, t)                                                     \
+  static_always_inline u32 ipsecmb_ops_enc_##n (                             \
+    vlib_main_t *vm, vnet_crypto_op_t *ops[], u32 n_ops)                     \
+  {                                                                          \
+    return ipsecmb_ops_cipher_auth_inline (vm, ops, n_ops, k, t,             \
+                                           IMB_CIPHER_##c,                   \
+                                           IMB_AUTH_HMAC_##h,                \
+                                           IMB_DIR_ENCRYPT);                 \
+  }                                                                          \
+                                                                             \
+  static_always_inline u32 ipsecmb_ops_dec_##n (                             \
+    vlib_main_t *vm, vnet_crypto_op_t *ops[], u32 n_ops)                     \
+  {                                                                          \
+    return ipsecmb_ops_cipher_auth_inline (vm, ops, n_ops, k, t,             \
+                                           IMB_CIPHER_##c,                   \
+                                           IMB_AUTH_HMAC_##h,                \
+                                           IMB_DIR_DECRYPT);                 \
+  }
+foreach_ipsecmb_linked_cipher_auth_op
+#undef _
+
 #ifdef HAVE_IPSECMB_CHACHA_POLY
-always_inline void
-ipsecmb_retire_aead_job (IMB_JOB *job, u32 *n_fail)
+  always_inline void
+  ipsecmb_retire_aead_job (IMB_JOB *job, u32 *n_fail)
 {
   vnet_crypto_op_t *op = job->user_data;
   u32 len = op->tag_len;
@@ -961,6 +1046,13 @@ vnet_crypto_engine_op_handlers_t op_handlers[] = {
   foreach_chacha_poly_fixed_aad_lengths
 #undef _
 #endif
+#define _(n, k, h, c, t)                                                     \
+  { .opt = VNET_CRYPTO_OP_##n##_ENC, .fn = ipsecmb_ops_enc_##n },            \
+  { .opt = VNET_CRYPTO_OP_##n##_DEC, .fn = ipsecmb_ops_dec_##n },
+
+  foreach_ipsecmb_linked_cipher_auth_op
+#undef _
+
   {}
 };
 
diff --git a/src/crypto_engines/native/aes_cbc.c b/src/crypto_engines/native/aes_cbc.c
index b4ed2b3493d..a573195626d 100644
--- a/src/crypto_engines/native/aes_cbc.c
+++ b/src/crypto_engines/native/aes_cbc.c
@@ -17,9 +17,8 @@
 
 #include <vlib/vlib.h>
 #include <vnet/plugin/plugin.h>
-#include <vnet/crypto/crypto.h>
-#include <native/crypto_native.h>
 #include <vppinfra/crypto/aes_cbc.h>
+#include <native/sha2.h>
 
 #if __GNUC__ > 4 && !__clang__ && CLIB_DEBUG == 0
 #pragma GCC optimize ("O3")
@@ -186,3 +185,48 @@ aes_cbc_key_exp_256 (vnet_crypto_key_t *key)
 
 foreach_aes_cbc_handler_type;
 #undef _
+
+#define foreach_crypto_native_cbc_hmac_op                                    \
+  _ (128, 224, CLIB_SHA2_224, 14)                                            \
+  _ (192, 224, CLIB_SHA2_224, 14)                                            \
+  _ (256, 224, CLIB_SHA2_224, 14)                                            \
+  _ (128, 256, CLIB_SHA2_256, 16)                                            \
+  _ (192, 256, CLIB_SHA2_256, 16)                                            \
+  _ (256, 256, CLIB_SHA2_256, 16)
+
+#define _(k, b, clib_sha2, t)                                                \
+  static u32 crypto_native_ops_enc_aes_##k##_cbc_hmac_sha##b##_tag##t (      \
+    vlib_main_t *vm, vnet_crypto_op_t *ops[], u32 n_ops)                     \
+  {                                                                          \
+    aes_ops_enc_aes_cbc (vm, ops, n_ops, AES_KEY_##k);                       \
+    crypto_native_ops_hmac_sha2 (vm, ops, n_ops, 0, clib_sha2);              \
+    return n_ops;                                                            \
+  }                                                                          \
+                                                                             \
+  static u32 crypto_native_ops_dec_aes_##k##_cbc_hmac_sha##b##_tag##t (      \
+    vlib_main_t *vm, vnet_crypto_op_t *ops[], u32 n_ops)                     \
+  {                                                                          \
+    crypto_native_ops_hmac_sha2 (vm, ops, n_ops, 0, clib_sha2);              \
+    aes_ops_dec_aes_cbc (vm, ops, n_ops, AES_KEY_##k);                       \
+    return n_ops;                                                            \
+  }                                                                          \
+                                                                             \
+  CRYPTO_NATIVE_OP_HANDLER (aes_##k##_cbc_hmac_sha##b##_tag##t##_enc) = {    \
+    .op_id = VNET_CRYPTO_OP_AES_##k##_CBC_SHA##b##_TAG##t##_ENC,             \
+    .fn = crypto_native_ops_enc_aes_##k##_cbc_hmac_sha##b##_tag##t,          \
+    .probe = aes_cbc_cpu_probe,                                              \
+  };                                                                         \
+                                                                             \
+  CRYPTO_NATIVE_OP_HANDLER (aes_##k##_cbc_hmac_sha##b##_tag##t##_dec) = {    \
+    .op_id = VNET_CRYPTO_OP_AES_##k##_CBC_SHA##b##_TAG##t##_DEC,             \
+    .fn = crypto_native_ops_dec_aes_##k##_cbc_hmac_sha##b##_tag##t,          \
+    .probe = aes_cbc_cpu_probe,                                              \
+  };                                                                         \
+                                                                             \
+  CRYPTO_NATIVE_KEY_HANDLER (aes_##k##_cbc_hmac_sha##b##_tag##t) = {         \
+    .alg_id = VNET_CRYPTO_ALG_AES_##k##_CBC_SHA##b##_TAG##t,                 \
+    .key_fn = aes_cbc_key_exp_##k,                                           \
+    .probe = aes_cbc_cpu_probe,                                              \
+  };
+
+foreach_crypto_native_cbc_hmac_op
+#undef _
\ No newline at end of file
diff --git a/src/crypto_engines/native/aes_ctr.c b/src/crypto_engines/native/aes_ctr.c
index d39b1c83842..a53762ec8da 100644
--- a/src/crypto_engines/native/aes_ctr.c
+++ b/src/crypto_engines/native/aes_ctr.c
@@ -4,9 +4,8 @@
 
 #include <vlib/vlib.h>
 #include <vnet/plugin/plugin.h>
-#include <vnet/crypto/crypto.h>
-#include <native/crypto_native.h>
 #include <vppinfra/crypto/aes_ctr.h>
+#include <native/sha2.h>
 
 #if __GNUC__ > 4 && !__clang__ && CLIB_DEBUG == 0
 #pragma GCC optimize("O3")
@@ -126,5 +125,50 @@ probe ()
   .probe = probe,                                                            \
 };
 
-_ (128) _ (192) _ (256)
+_ (128)
+_ (192)
+_ (256)
+#undef _
+
+#define foreach_crypto_native_ctr_hmac_op                                    \
+  _ (128, 256, CLIB_SHA2_256, 16)                                            \
+  _ (192, 256, CLIB_SHA2_256, 16)                                            \
+  _ (256, 256, CLIB_SHA2_256, 16)
+
+#define _(k, b, clib_sha2, t)                                                \
+  static u32 crypto_native_ops_enc_aes_##k##_ctr_hmac_sha##b##_tag##t (      \
+    vlib_main_t *vm, vnet_crypto_op_t *ops[], u32 n_ops)                     \
+  {                                                                          \
+    aes_ops_aes_ctr (vm, ops, n_ops, 0, AES_KEY_##k, 0);                     \
+    crypto_native_ops_hmac_sha2 (vm, ops, n_ops, 0, clib_sha2);              \
+    return n_ops;                                                            \
+  }                                                                          \
+                                                                             \
+  static u32 crypto_native_ops_dec_aes_##k##_ctr_hmac_sha##b##_tag##t (      \
+    vlib_main_t *vm, vnet_crypto_op_t *ops[], u32 n_ops)                     \
+  {                                                                          \
+    crypto_native_ops_hmac_sha2 (vm, ops, n_ops, 0, clib_sha2);              \
+    aes_ops_aes_ctr (vm, ops, n_ops, 0, AES_KEY_##k, 0);                     \
+    return n_ops;                                                            \
+  }                                                                          \
+                                                                             \
+  CRYPTO_NATIVE_OP_HANDLER (aes_##k##_ctr_hmac_sha##b##_tag##t##_enc) = {    \
+    .op_id = VNET_CRYPTO_OP_AES_##k##_CTR_SHA##b##_TAG##t##_ENC,             \
+    .fn = crypto_native_ops_enc_aes_##k##_ctr_hmac_sha##b##_tag##t,          \
+    .probe = probe,                                                          \
+  };                                                                         \
+                                                                             \
+  CRYPTO_NATIVE_OP_HANDLER (aes_##k##_ctr_hmac_sha##b##_tag##t##_dec) = {    \
+    .op_id = VNET_CRYPTO_OP_AES_##k##_CTR_SHA##b##_TAG##t##_DEC,             \
+    .fn = crypto_native_ops_dec_aes_##k##_ctr_hmac_sha##b##_tag##t,          \
+    .probe = probe,                                                          \
+  };                                                                         \
+                                                                             \
+  CRYPTO_NATIVE_KEY_HANDLER (aes_##k##_ctr_hmac_sha##b##_tag##t) = {         \
+    .alg_id = VNET_CRYPTO_ALG_AES_##k##_CTR_SHA##b##_TAG##t,                 \
+    .key_fn = aes_ctr_key_exp_##k,                                           \
+    .probe = probe,                                                          \
+  };
+
+foreach_crypto_native_ctr_hmac_op
 #undef _
diff --git a/src/crypto_engines/native/sha2.c b/src/crypto_engines/native/sha2.c
index 46a71b5b327..d92bc19b359 100644
--- a/src/crypto_engines/native/sha2.c
+++ b/src/crypto_engines/native/sha2.c
@@ -4,9 +4,7 @@
 
 #include <vlib/vlib.h>
 #include <vnet/plugin/plugin.h>
-#include <vnet/crypto/crypto.h>
-#include <native/crypto_native.h>
-#include <vppinfra/crypto/sha2.h>
+#include <native/sha2.h>
 
 static_always_inline u32
 crypto_native_ops_hash_sha2 (vlib_main_t *vm, vnet_crypto_op_t *ops[],
@@ -40,70 +38,6 @@ next:
   return n_ops;
 }
 
-static_always_inline u32
-crypto_native_ops_hmac_sha2 (vlib_main_t *vm, vnet_crypto_op_t *ops[],
-                             u32 n_ops, vnet_crypto_op_chunk_t *chunks,
-                             clib_sha2_type_t type)
-{
-  crypto_native_main_t *cm = &crypto_native_main;
-  vnet_crypto_op_t *op = ops[0];
-  u32 n_left = n_ops;
-  clib_sha2_hmac_ctx_t ctx;
-  u8 buffer[64];
-  u32 sz, n_fail = 0;
-
-  for (; n_left; n_left--, op++)
-    {
-      clib_sha2_hmac_init (
-        &ctx, type, (clib_sha2_hmac_key_data_t *) cm->key_data[op->key_index]);
-      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
-        {
-          vnet_crypto_op_chunk_t *chp = chunks + op->chunk_index;
-          for (int j = 0; j < op->n_chunks; j++, chp++)
-            clib_sha2_hmac_update (&ctx, chp->src, chp->len);
-        }
-      else
-        clib_sha2_hmac_update (&ctx, op->src, op->len);
-
-      clib_sha2_hmac_final (&ctx, buffer);
-
-      if (op->digest_len)
-        {
-          sz = op->digest_len;
-          if (op->flags & VNET_CRYPTO_OP_FLAG_HMAC_CHECK)
-            {
-              if ((memcmp (op->digest, buffer, sz)))
-                {
-                  n_fail++;
-                  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
-                  continue;
-                }
-            }
-          else
-            clib_memcpy_fast (op->digest, buffer, sz);
-        }
-      else
-        {
-          sz = clib_sha2_variants[type].digest_size;
-          if (op->flags & VNET_CRYPTO_OP_FLAG_HMAC_CHECK)
-            {
-              if ((memcmp (op->digest, buffer, sz)))
-                {
-                  n_fail++;
-                  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
-                  continue;
-                }
-            }
-          else
-            clib_memcpy_fast (op->digest, buffer, sz);
-        }
-
-      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
-    }
-
-  return n_ops - n_fail;
-}
-
 static void *
 sha2_key_add (vnet_crypto_key_t *key, clib_sha2_type_t type)
 {
diff --git a/src/crypto_engines/native/sha2.h b/src/crypto_engines/native/sha2.h
new file mode 100644
index 00000000000..a78c6829fd6
--- /dev/null
+++ b/src/crypto_engines/native/sha2.h
@@ -0,0 +1,87 @@
+/*
+ *------------------------------------------------------------------
+ * Copyright (c) 2025 Cisco and/or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *------------------------------------------------------------------
+ */
+
+#ifndef __sha2_h__
+#define __sha2_h__
+#include <vnet/crypto/crypto.h>
+#include <native/crypto_native.h>
+#include <vppinfra/crypto/sha2.h>
+
+static_always_inline u32
+crypto_native_ops_hmac_sha2 (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+                             u32 n_ops, vnet_crypto_op_chunk_t *chunks,
+                             clib_sha2_type_t type)
+{
+  crypto_native_main_t *cm = &crypto_native_main;
+  vnet_crypto_op_t *op = ops[0];
+  u32 n_left = n_ops;
+  clib_sha2_hmac_ctx_t ctx;
+  u8 buffer[64];
+  u32 sz, n_fail = 0;
+
+  for (; n_left; n_left--, op++)
+    {
+      clib_sha2_hmac_init (
+        &ctx, type, (clib_sha2_hmac_key_data_t *) cm->key_data[op->key_index]);
+      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
+        {
+          vnet_crypto_op_chunk_t *chp = chunks + op->chunk_index;
+          for (int j = 0; j < op->n_chunks; j++, chp++)
+            clib_sha2_hmac_update (&ctx, chp->src, chp->len);
+        }
+      else
+        clib_sha2_hmac_update (&ctx, op->src, op->len);
+
+      clib_sha2_hmac_final (&ctx, buffer);
+
+      if (op->digest_len)
+        {
+          sz = op->digest_len;
+          if (op->flags & VNET_CRYPTO_OP_FLAG_HMAC_CHECK)
+            {
+              if ((memcmp (op->digest, buffer, sz)))
+                {
+                  n_fail++;
+                  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
+                  continue;
+                }
+            }
+          else
+            clib_memcpy_fast (op->digest, buffer, sz);
+        }
+      else
+        {
+          sz = clib_sha2_variants[type].digest_size;
+          if (op->flags & VNET_CRYPTO_OP_FLAG_HMAC_CHECK)
+            {
+              if ((memcmp (op->digest, buffer, sz)))
+                {
+                  n_fail++;
+                  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
+                  continue;
+                }
+            }
+          else
+            clib_memcpy_fast (op->digest, buffer, sz);
+        }
+
+      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
+    }
+
+  return n_ops - n_fail;
+}
+#endif /* __sha2_h__ */
\ No newline at end of file
diff --git a/src/crypto_engines/openssl/main.c b/src/crypto_engines/openssl/main.c
index a95c1710a34..aae2141b75b 100644
--- a/src/crypto_engines/openssl/main.c
+++ b/src/crypto_engines/openssl/main.c
@@ -69,6 +69,20 @@ static u32 num_threads;
   _ (AES_192_CBC_MD5_TAG12, EVP_aes_192_cbc, EVP_md5, 12)                    \
   _ (AES_256_CBC_MD5_TAG12, EVP_aes_256_cbc, EVP_md5, 12)
 
+#define foreach_openssl_linked_ctr_hmac_op                                   \
+  _ (AES_128_CTR_SHA1_TAG12, EVP_aes_128_ctr, EVP_sha1, 12)                  \
+  _ (AES_192_CTR_SHA1_TAG12, EVP_aes_192_ctr, EVP_sha1, 12)                  \
+  _ (AES_256_CTR_SHA1_TAG12, EVP_aes_256_ctr, EVP_sha1, 12)                  \
+  _ (AES_128_CTR_SHA256_TAG16, EVP_aes_128_ctr, EVP_sha256, 16)              \
+  _ (AES_192_CTR_SHA256_TAG16, EVP_aes_192_ctr, EVP_sha256, 16)              \
+  _ (AES_256_CTR_SHA256_TAG16, EVP_aes_256_ctr, EVP_sha256, 16)              \
+  _ (AES_128_CTR_SHA384_TAG24, EVP_aes_128_ctr, EVP_sha384, 24)              \
+  _ (AES_192_CTR_SHA384_TAG24, EVP_aes_192_ctr, EVP_sha384, 24)              \
+  _ (AES_256_CTR_SHA384_TAG24, EVP_aes_256_ctr, EVP_sha384, 24)              \
+  _ (AES_128_CTR_SHA512_TAG32, EVP_aes_128_ctr, EVP_sha512, 32)              \
+  _ (AES_192_CTR_SHA512_TAG32, EVP_aes_192_ctr, EVP_sha512, 32)              \
+  _ (AES_256_CTR_SHA512_TAG32, EVP_aes_256_ctr, EVP_sha512, 32)
+
 #define foreach_openssl_chacha20_evp_op                                      \
   _ (chacha20_poly1305, CHACHA20_POLY1305, EVP_chacha20_poly1305, 0, 0)      \
   _ (chacha20_poly1305, CHACHA20_POLY1305_TAG16_AAD0, EVP_chacha20_poly1305, \
@@ -678,7 +692,7 @@ foreach_openssl_evp_op;
     openssl_ctx_hmac (key, kop, idx, m ());                                  \
     return NULL;                                                             \
   }
-foreach_openssl_linked_cbc_hmac_op
+foreach_openssl_linked_cbc_hmac_op foreach_openssl_linked_ctr_hmac_op
 #undef _
 
 #define _(a, b)                                                              \
@@ -737,7 +751,7 @@ crypto_openssl_init (vnet_crypto_engine_registration_t *r)
 #undef _
 
 #define _(n, c, m, t) cm->ctx_fn[VNET_CRYPTO_ALG_##n] = openssl_ctx_##n;
-  foreach_openssl_linked_cbc_hmac_op
+  foreach_openssl_linked_cbc_hmac_op foreach_openssl_linked_ctr_hmac_op
 #undef _
 
 #define _(a, b) cm->ctx_fn[VNET_CRYPTO_ALG_HMAC_##a] = openssl_ctx_hmac_##a;
@@ -774,7 +788,7 @@ vnet_crypto_engine_op_handlers_t op_handlers[] = {
   { .opt = VNET_CRYPTO_OP_##n##_DEC,                                         \
     .fn = openssl_ops_dec_##n,                                               \
     .cfn = openssl_ops_dec_chained_##n },
-  foreach_openssl_linked_cbc_hmac_op
+  foreach_openssl_linked_cbc_hmac_op foreach_openssl_linked_ctr_hmac_op
 #undef _
 #define _(a, b)                                                              \
   { .opt = VNET_CRYPTO_OP_##a##_HMAC,                                        \
-- 
2.16.6
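
Usage sketch (not part of the patch): the new combined *_TAG* op ids are
driven through the existing vnet_crypto linked-key API, the same way the
ESP data path consumes linked cipher+integ keys. A minimal sketch under
that assumption -- vm, aes_key, auth_key, payload, payload_len, iv and tag
are placeholders, and error handling is omitted:

    #include <vnet/crypto/crypto.h>

    /* link a cipher key with an integrity key; the resulting linked key
       index is what the new cbc/ctr+hmac handlers look up */
    u32 ckey = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_AES_128_CBC,
                                    aes_key, 16);
    u32 ikey = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_HMAC_SHA1,
                                    auth_key, 20);
    u32 lkey = vnet_crypto_key_add_linked (vm, ckey, ikey);

    vnet_crypto_op_t op;
    vnet_crypto_op_init (&op, VNET_CRYPTO_OP_AES_128_CBC_SHA1_TAG12_ENC);
    op.key_index = lkey;        /* linked key, not the plain cipher key */
    op.iv = iv;                 /* 16-byte AES-CBC IV */
    op.src = op.dst = payload;  /* encrypt in place ... */
    op.len = payload_len;       /* ... then HMAC over the ciphertext */
    op.digest = tag;            /* receives 12 bytes of truncated HMAC-SHA1 */
    vnet_crypto_process_ops (vm, &op, 1);

The matching *_DEC op ids authenticate first and decrypt second (the
handlers above run HMAC before the cipher on decrypt, IMB_ORDER_HASH_CIPHER
in the ipsecmb engine), setting VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC on a
tag mismatch.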