X-Git-Url: https://gerrit.fd.io/r/gitweb?a=blobdiff_plain;f=src%2Fplugins%2Fcrypto_openssl%2Fmain.c;h=b070cf336a5e418249b6225a209196839ad0b867;hb=HEAD;hp=6843880eed0d25a371b7387a837537d6c2af0278;hpb=fd78a1f65b3e698b0e99f29584b060750b89bdab;p=vpp.git

diff --git a/src/plugins/crypto_openssl/main.c b/src/plugins/crypto_openssl/main.c
index 6843880eed0..c59b5d34a29 100644
--- a/src/plugins/crypto_openssl/main.c
+++ b/src/plugins/crypto_openssl/main.c
@@ -15,7 +15,7 @@
  *------------------------------------------------------------------
  */

-#include
+#include

 #include
 #include
@@ -26,12 +26,14 @@
 #include
 #include
 #include
+#include

 typedef struct
 {
   CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
-  EVP_CIPHER_CTX *evp_cipher_ctx;
-  HMAC_CTX *hmac_ctx;
+  EVP_CIPHER_CTX **evp_cipher_enc_ctx;
+  EVP_CIPHER_CTX **evp_cipher_dec_ctx;
+  HMAC_CTX **hmac_ctx;
   EVP_MD_CTX *hash_ctx;
 #if OPENSSL_VERSION_NUMBER < 0x10100000L
   HMAC_CTX _hmac_ctx;
@@ -51,7 +53,10 @@ static openssl_per_thread_data_t *per_thread_data = 0;
   _ (gcm, AES_256_GCM, EVP_aes_256_gcm, 8)                                    \
   _ (cbc, AES_128_CTR, EVP_aes_128_ctr, 8)                                    \
   _ (cbc, AES_192_CTR, EVP_aes_192_ctr, 8)                                    \
-  _ (cbc, AES_256_CTR, EVP_aes_256_ctr, 8)
+  _ (cbc, AES_256_CTR, EVP_aes_256_ctr, 8)                                    \
+  _ (null_gmac, AES_128_NULL_GMAC, EVP_aes_128_gcm, 8)                        \
+  _ (null_gmac, AES_192_NULL_GMAC, EVP_aes_192_gcm, 8)                        \
+  _ (null_gmac, AES_256_NULL_GMAC, EVP_aes_256_gcm, 8)

 #define foreach_openssl_chacha20_evp_op                                       \
   _ (chacha20_poly1305, CHACHA20_POLY1305, EVP_chacha20_poly1305, 8)
@@ -86,6 +91,8 @@ static openssl_per_thread_data_t *per_thread_data = 0;
   _(SHA384, EVP_sha384) \
   _(SHA512, EVP_sha512)

+crypto_openssl_main_t crypto_openssl_main;
+
 static_always_inline u32
 openssl_ops_enc_cbc (vlib_main_t *vm, vnet_crypto_op_t *ops[],
                      vnet_crypto_op_chunk_t *chunks, u32 n_ops,
@@ -93,7 +100,7 @@ openssl_ops_enc_cbc (vlib_main_t *vm, vnet_crypto_op_t *ops[],
 {
   openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
                                                      vm->thread_index);
-  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
+  EVP_CIPHER_CTX *ctx;
   vnet_crypto_op_chunk_t *chp;
   u32 i, j, curr_len = 0;
   u8 out_buf[VLIB_BUFFER_DEFAULT_DATA_SIZE * 5];
@@ -101,16 +108,10 @@ openssl_ops_enc_cbc (vlib_main_t *vm, vnet_crypto_op_t *ops[],
   for (i = 0; i < n_ops; i++)
     {
       vnet_crypto_op_t *op = ops[i];
-      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
       int out_len = 0;

-      if (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV)
-        RAND_bytes (op->iv, iv_len);
-
-      EVP_EncryptInit_ex (ctx, cipher, NULL, key->data, op->iv);
-
-      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
-        EVP_CIPHER_CTX_set_padding (ctx, 0);
+      ctx = ptd->evp_cipher_enc_ctx[op->key_index];
+      EVP_EncryptInit_ex (ctx, NULL, NULL, NULL, op->iv);

       if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
         {
@@ -154,7 +155,7 @@ openssl_ops_dec_cbc (vlib_main_t *vm, vnet_crypto_op_t *ops[],
 {
   openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
                                                      vm->thread_index);
-  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
+  EVP_CIPHER_CTX *ctx;
   vnet_crypto_op_chunk_t *chp;
   u32 i, j, curr_len = 0;
   u8 out_buf[VLIB_BUFFER_DEFAULT_DATA_SIZE * 5];
@@ -162,13 +163,10 @@ openssl_ops_dec_cbc (vlib_main_t *vm, vnet_crypto_op_t *ops[],
   for (i = 0; i < n_ops; i++)
     {
       vnet_crypto_op_t *op = ops[i];
-      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
       int out_len = 0;

-      EVP_DecryptInit_ex (ctx, cipher, NULL, key->data, op->iv);
-
-      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
-        EVP_CIPHER_CTX_set_padding (ctx, 0);
+      ctx = ptd->evp_cipher_dec_ctx[op->key_index];
+      EVP_DecryptInit_ex (ctx, NULL, NULL, NULL, op->iv);

       if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
         {
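The two CBC hunks above replace the shared per-thread scratch context with per-key tables (evp_cipher_enc_ctx / evp_cipher_dec_ctx), so cipher selection and the AES key schedule run once per key instead of once per packet; the per-op EVP_*Init_ex (ctx, NULL, NULL, NULL, op->iv) call only reloads the IV, and the per-op RAND_bytes IV generation is removed from this path as well. A minimal standalone sketch of that OpenSSL reuse pattern, with hypothetical helper names (not part of this patch):

    #include <openssl/evp.h>

    static EVP_CIPHER_CTX *
    make_enc_ctx (const unsigned char *key16) /* hypothetical helper */
    {
      EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new ();
      EVP_CIPHER_CTX_set_padding (ctx, 0);
      /* expensive part: cipher selection + key schedule, done once per key */
      EVP_EncryptInit_ex (ctx, EVP_aes_128_cbc (), NULL, key16, NULL);
      return ctx;
    }

    static int
    encrypt_one (EVP_CIPHER_CTX *ctx, const unsigned char iv[16],
                 const unsigned char *src, int len, unsigned char *dst)
    {
      int out_len;
      /* cheap part: NULL cipher/key keep the existing key schedule and only
         reload the IV; len must be a block multiple since padding is off */
      if (EVP_EncryptInit_ex (ctx, NULL, NULL, NULL, iv) != 1)
        return -1;
      if (EVP_EncryptUpdate (ctx, dst, &out_len, src, len) != 1)
        return -1;
      return out_len;
    }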
@@ -208,26 +206,32 @@ openssl_ops_dec_cbc (vlib_main_t *vm, vnet_crypto_op_t *ops[],
 static_always_inline u32
 openssl_ops_enc_aead (vlib_main_t *vm, vnet_crypto_op_t *ops[],
                       vnet_crypto_op_chunk_t *chunks, u32 n_ops,
-                      const EVP_CIPHER *cipher, int is_gcm, const int iv_len)
+                      const EVP_CIPHER *cipher, int is_gcm, int is_gmac,
+                      const int iv_len)
 {
   openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
                                                      vm->thread_index);
-  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
+  EVP_CIPHER_CTX *ctx;
   vnet_crypto_op_chunk_t *chp;
   u32 i, j;
   for (i = 0; i < n_ops; i++)
     {
       vnet_crypto_op_t *op = ops[i];
-      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
       int len = 0;

-      if (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV)
-        RAND_bytes (op->iv, 8);
+      if (i + 2 < n_ops)
+        {
+          CLIB_PREFETCH (ops[i + 1]->src, 4 * CLIB_CACHE_PREFETCH_BYTES, LOAD);
+          CLIB_PREFETCH (ops[i + 1]->dst, 4 * CLIB_CACHE_PREFETCH_BYTES,
+                         STORE);
+
+          CLIB_PREFETCH (ops[i + 2]->src, 4 * CLIB_CACHE_PREFETCH_BYTES, LOAD);
+          CLIB_PREFETCH (ops[i + 2]->dst, 4 * CLIB_CACHE_PREFETCH_BYTES,
+                         STORE);
+        }

-      EVP_EncryptInit_ex (ctx, cipher, 0, 0, 0);
-      if (is_gcm)
-        EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, NULL);
-      EVP_EncryptInit_ex (ctx, 0, 0, key->data, op->iv);
+      ctx = ptd->evp_cipher_enc_ctx[op->key_index];
+      EVP_EncryptInit_ex (ctx, 0, 0, NULL, op->iv);
       if (op->aad_len)
         EVP_EncryptUpdate (ctx, NULL, &len, op->aad, op->aad_len);
       if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
@@ -235,26 +239,36 @@ openssl_ops_enc_aead (vlib_main_t *vm, vnet_crypto_op_t *ops[],
           chp = chunks + op->chunk_index;
           for (j = 0; j < op->n_chunks; j++)
             {
-              EVP_EncryptUpdate (ctx, chp->dst, &len, chp->src, chp->len);
+              EVP_EncryptUpdate (ctx, is_gmac ? 0 : chp->dst, &len, chp->src,
+                                 chp->len);
               chp += 1;
             }
         }
       else
-        EVP_EncryptUpdate (ctx, op->dst, &len, op->src, op->len);
-      EVP_EncryptFinal_ex (ctx, op->dst + len, &len);
+        EVP_EncryptUpdate (ctx, is_gmac ? 0 : op->dst, &len, op->src, op->len);
+      EVP_EncryptFinal_ex (ctx, is_gmac ? 0 : op->dst + len, &len);
       EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_AEAD_GET_TAG, op->tag_len, op->tag);
       op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
     }
   return n_ops;
 }

+static_always_inline u32
+openssl_ops_enc_null_gmac (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+                           vnet_crypto_op_chunk_t *chunks, u32 n_ops,
+                           const EVP_CIPHER *cipher, const int iv_len)
+{
+  return openssl_ops_enc_aead (vm, ops, chunks, n_ops, cipher,
+                               /* is_gcm */ 1, /* is_gmac */ 1, iv_len);
+}
+
 static_always_inline u32
 openssl_ops_enc_gcm (vlib_main_t *vm, vnet_crypto_op_t *ops[],
                      vnet_crypto_op_chunk_t *chunks, u32 n_ops,
                      const EVP_CIPHER *cipher, const int iv_len)
 {
   return openssl_ops_enc_aead (vm, ops, chunks, n_ops, cipher,
-                               /* is_gcm */ 1, iv_len);
+                               /* is_gcm */ 1, /* is_gmac */ 0, iv_len);
 }

 static_always_inline __clib_unused u32
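In openssl_ops_enc_aead above, the CLIB_PREFETCH calls warm the src/dst of the next two ops while OpenSSL works on the current one. The new null_gmac wrapper relies on an EVP GCM property: passing a NULL output pointer to EVP_EncryptUpdate makes the input count as AAD, i.e. it is authenticated but never written out, which is exactly what the is_gmac ? 0 : dst selection does to turn the same AEAD path into pure AES-GMAC. A hedged standalone sketch of the same trick (assumes a 12-byte IV, OpenSSL's GCM default; helper name is hypothetical):

    #include <openssl/evp.h>

    /* compute a 16-byte AES-128-GMAC tag over msg; returns 0 on success */
    static int
    gmac_tag (const unsigned char key[16], const unsigned char iv[12],
              const unsigned char *msg, int msg_len, unsigned char tag[16])
    {
      int len, rv = -1;
      EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new ();
      if (!ctx)
        return -1;
      if (EVP_EncryptInit_ex (ctx, EVP_aes_128_gcm (), NULL, key, iv) == 1 &&
          /* NULL out pointer: msg is authenticated only, never encrypted */
          EVP_EncryptUpdate (ctx, NULL, &len, msg, msg_len) == 1 &&
          EVP_EncryptFinal_ex (ctx, NULL, &len) == 1 &&
          EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_AEAD_GET_TAG, 16, tag) == 1)
        rv = 0;
      EVP_CIPHER_CTX_free (ctx);
      return rv;
    }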
@@ -263,29 +277,27 @@ openssl_ops_enc_chacha20_poly1305 (vlib_main_t *vm, vnet_crypto_op_t *ops[],
                                    const EVP_CIPHER *cipher, const int iv_len)
 {
   return openssl_ops_enc_aead (vm, ops, chunks, n_ops, cipher,
-                               /* is_gcm */ 0, iv_len);
+                               /* is_gcm */ 0, /* is_gmac */ 0, iv_len);
 }

 static_always_inline u32
 openssl_ops_dec_aead (vlib_main_t *vm, vnet_crypto_op_t *ops[],
                       vnet_crypto_op_chunk_t *chunks, u32 n_ops,
-                      const EVP_CIPHER *cipher, int is_gcm, const int iv_len)
+                      const EVP_CIPHER *cipher, int is_gcm, int is_gmac,
+                      const int iv_len)
 {
   openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
                                                      vm->thread_index);
-  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
+  EVP_CIPHER_CTX *ctx;
   vnet_crypto_op_chunk_t *chp;
   u32 i, j, n_fail = 0;
   for (i = 0; i < n_ops; i++)
     {
       vnet_crypto_op_t *op = ops[i];
-      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
       int len = 0;

-      EVP_DecryptInit_ex (ctx, cipher, 0, 0, 0);
-      if (is_gcm)
-        EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, 0);
-      EVP_DecryptInit_ex (ctx, 0, 0, key->data, op->iv);
+      ctx = ptd->evp_cipher_dec_ctx[op->key_index];
+      EVP_DecryptInit_ex (ctx, 0, 0, NULL, op->iv);
       if (op->aad_len)
         EVP_DecryptUpdate (ctx, 0, &len, op->aad, op->aad_len);
       if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
@@ -293,15 +305,19 @@ openssl_ops_dec_aead (vlib_main_t *vm, vnet_crypto_op_t *ops[],
           chp = chunks + op->chunk_index;
           for (j = 0; j < op->n_chunks; j++)
             {
-              EVP_DecryptUpdate (ctx, chp->dst, &len, chp->src, chp->len);
+              EVP_DecryptUpdate (ctx, is_gmac ? 0 : chp->dst, &len, chp->src,
+                                 chp->len);
               chp += 1;
             }
         }
       else
-        EVP_DecryptUpdate (ctx, op->dst, &len, op->src, op->len);
+        {
+          EVP_DecryptUpdate (ctx, is_gmac ? 0 : op->dst, &len, op->src,
+                             op->len);
+        }
       EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_AEAD_SET_TAG, op->tag_len, op->tag);

-      if (EVP_DecryptFinal_ex (ctx, op->dst + len, &len) > 0)
+      if (EVP_DecryptFinal_ex (ctx, is_gmac ? 0 : op->dst + len, &len) > 0)
         op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
       else
         {
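The decrypt side follows the standard EVP AEAD contract: the expected tag is loaded with EVP_CTRL_AEAD_SET_TAG before the final call, and only a return value greater than zero from EVP_DecryptFinal_ex means the tag verified; failures accumulate in n_fail and are subtracted from the return value. Condensed sketch of that contract, assuming a pre-keyed context as elsewhere in this patch (helper name hypothetical):

    #include <openssl/evp.h>

    /* AEAD decrypt + tag verification; returns 0 only if the tag matched */
    static int
    aead_decrypt_verify (EVP_CIPHER_CTX *ctx, const unsigned char *iv,
                         const unsigned char *src, int len, unsigned char *dst,
                         unsigned char *tag, int tag_len)
    {
      int n = 0, f = 0;
      EVP_DecryptInit_ex (ctx, NULL, NULL, NULL, iv); /* IV reload only */
      EVP_DecryptUpdate (ctx, dst, &n, src, len);
      /* the expected tag must be set before the final call */
      EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_AEAD_SET_TAG, tag_len, tag);
      /* > 0 means the computed tag matched; anything else is a failure */
      return EVP_DecryptFinal_ex (ctx, dst + n, &f) > 0 ? 0 : -1;
    }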
@@ -312,13 +328,22 @@ openssl_ops_dec_aead (vlib_main_t *vm, vnet_crypto_op_t *ops[],
   return n_ops - n_fail;
 }

+static_always_inline u32
+openssl_ops_dec_null_gmac (vlib_main_t *vm, vnet_crypto_op_t *ops[],
+                           vnet_crypto_op_chunk_t *chunks, u32 n_ops,
+                           const EVP_CIPHER *cipher, const int iv_len)
+{
+  return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
+                               /* is_gcm */ 1, /* is_gmac */ 1, iv_len);
+}
+
 static_always_inline u32
 openssl_ops_dec_gcm (vlib_main_t *vm, vnet_crypto_op_t *ops[],
                      vnet_crypto_op_chunk_t *chunks, u32 n_ops,
                      const EVP_CIPHER *cipher, const int iv_len)
 {
   return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
-                               /* is_gcm */ 1, iv_len);
+                               /* is_gcm */ 1, /* is_gmac */ 0, iv_len);
 }

 static_always_inline __clib_unused u32
@@ -327,7 +352,7 @@ openssl_ops_dec_chacha20_poly1305 (vlib_main_t *vm, vnet_crypto_op_t *ops[],
                                    const EVP_CIPHER *cipher, const int iv_len)
 {
   return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
-                               /* is_gcm */ 0, iv_len);
+                               /* is_gcm */ 0, /* is_gmac */ 0, iv_len);
 }

 static_always_inline u32
@@ -372,17 +397,17 @@ openssl_ops_hmac (vlib_main_t * vm, vnet_crypto_op_t * ops[],
   u8 buffer[64];
   openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
                                                      vm->thread_index);
-  HMAC_CTX *ctx = ptd->hmac_ctx;
+  HMAC_CTX *ctx;
   vnet_crypto_op_chunk_t *chp;
   u32 i, j, n_fail = 0;
   for (i = 0; i < n_ops; i++)
     {
       vnet_crypto_op_t *op = ops[i];
-      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
       unsigned int out_len = 0;
       size_t sz = op->digest_len ? op->digest_len : EVP_MD_size (md);

-      HMAC_Init_ex (ctx, key->data, vec_len (key->data), md, NULL);
+      ctx = ptd->hmac_ctx[op->key_index];
+      HMAC_Init_ex (ctx, NULL, 0, NULL, NULL);
       if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
         {
           chp = chunks + op->chunk_index;
@@ -412,6 +437,131 @@ openssl_ops_hmac (vlib_main_t * vm, vnet_crypto_op_t * ops[],
   return n_ops - n_fail;
 }

+static_always_inline void *
+openssl_ctx_cipher (vnet_crypto_key_t *key, vnet_crypto_key_op_t kop,
+                    vnet_crypto_key_index_t idx, const EVP_CIPHER *cipher,
+                    int is_gcm)
+{
+  EVP_CIPHER_CTX *ctx;
+  openssl_per_thread_data_t *ptd;
+
+  if (VNET_CRYPTO_KEY_OP_ADD == kop)
+    {
+      vec_foreach (ptd, per_thread_data)
+        {
+          vec_validate_aligned (ptd->evp_cipher_enc_ctx, idx,
+                                CLIB_CACHE_LINE_BYTES);
+          vec_validate_aligned (ptd->evp_cipher_dec_ctx, idx,
+                                CLIB_CACHE_LINE_BYTES);
+
+          ctx = EVP_CIPHER_CTX_new ();
+          EVP_CIPHER_CTX_set_padding (ctx, 0);
+          EVP_EncryptInit_ex (ctx, cipher, NULL, NULL, NULL);
+          if (is_gcm)
+            EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, NULL);
+          EVP_EncryptInit_ex (ctx, 0, 0, key->data, 0);
+          ptd->evp_cipher_enc_ctx[idx] = ctx;
+
+          ctx = EVP_CIPHER_CTX_new ();
+          EVP_CIPHER_CTX_set_padding (ctx, 0);
+          EVP_DecryptInit_ex (ctx, cipher, 0, 0, 0);
+          if (is_gcm)
+            EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, 0);
+          EVP_DecryptInit_ex (ctx, 0, 0, key->data, 0);
+          ptd->evp_cipher_dec_ctx[idx] = ctx;
+        }
+    }
+  else if (VNET_CRYPTO_KEY_OP_MODIFY == kop)
+    {
+      vec_foreach (ptd, per_thread_data)
+        {
+          ctx = ptd->evp_cipher_enc_ctx[idx];
+          EVP_EncryptInit_ex (ctx, cipher, NULL, NULL, NULL);
+          if (is_gcm)
+            EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, NULL);
+          EVP_EncryptInit_ex (ctx, 0, 0, key->data, 0);
+
+          ctx = ptd->evp_cipher_dec_ctx[idx];
+          EVP_DecryptInit_ex (ctx, cipher, 0, 0, 0);
+          if (is_gcm)
+            EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, 0);
+          EVP_DecryptInit_ex (ctx, 0, 0, key->data, 0);
+        }
+    }
+  else if (VNET_CRYPTO_KEY_OP_DEL == kop)
+    {
+      vec_foreach (ptd, per_thread_data)
+        {
+          ctx = ptd->evp_cipher_enc_ctx[idx];
+          EVP_CIPHER_CTX_free (ctx);
+          ptd->evp_cipher_enc_ctx[idx] = NULL;
+
+          ctx = ptd->evp_cipher_dec_ctx[idx];
+          EVP_CIPHER_CTX_free (ctx);
+          ptd->evp_cipher_dec_ctx[idx] = NULL;
+        }
+    }
+  return NULL;
+}
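openssl_ctx_cipher gives key material an explicit lifecycle: ADD grows the per-thread tables with vec_validate_aligned and allocates one keyed encrypt and one keyed decrypt context per worker, MODIFY re-keys the existing contexts in place, and DEL frees them and clears the slots. Reduced single-table model of the same state machine (hypothetical names, outside VPP's vec machinery):

    #include <openssl/evp.h>

    enum key_op { KEY_ADD, KEY_MODIFY, KEY_DEL }; /* stand-in for kop */

    static void
    key_event (EVP_CIPHER_CTX **table, unsigned idx, enum key_op op,
               const EVP_CIPHER *cipher, const unsigned char *key)
    {
      switch (op)
        {
        case KEY_ADD:             /* allocate + run the key schedule once */
          table[idx] = EVP_CIPHER_CTX_new ();
          EVP_CIPHER_CTX_set_padding (table[idx], 0);
          EVP_EncryptInit_ex (table[idx], cipher, NULL, key, NULL);
          break;
        case KEY_MODIFY:          /* re-key in place, context is reused */
          EVP_EncryptInit_ex (table[idx], cipher, NULL, key, NULL);
          break;
        case KEY_DEL:             /* free and clear the slot */
          EVP_CIPHER_CTX_free (table[idx]);
          table[idx] = NULL;
          break;
        }
    }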
+
+static_always_inline void *
+openssl_ctx_hmac (vnet_crypto_key_t *key, vnet_crypto_key_op_t kop,
+                  vnet_crypto_key_index_t idx, const EVP_MD *md)
+{
+  HMAC_CTX *ctx;
+  openssl_per_thread_data_t *ptd;
+  if (VNET_CRYPTO_KEY_OP_ADD == kop)
+    {
+      vec_foreach (ptd, per_thread_data)
+        {
+          vec_validate_aligned (ptd->hmac_ctx, idx, CLIB_CACHE_LINE_BYTES);
+#if OPENSSL_VERSION_NUMBER >= 0x10100000L
+          ctx = HMAC_CTX_new ();
+          HMAC_Init_ex (ctx, key->data, vec_len (key->data), md, NULL);
+          ptd->hmac_ctx[idx] = ctx;
+#else
+          HMAC_CTX_init (&(ptd->_hmac_ctx));
+          ptd->hmac_ctx[idx] = &ptd->_hmac_ctx;
+#endif
+        }
+    }
+  else if (VNET_CRYPTO_KEY_OP_MODIFY == kop)
+    {
+      vec_foreach (ptd, per_thread_data)
+        {
+          ctx = ptd->hmac_ctx[idx];
+          HMAC_Init_ex (ctx, key->data, vec_len (key->data), md, NULL);
+        }
+    }
+  else if (VNET_CRYPTO_KEY_OP_DEL == kop)
+    {
+      vec_foreach (ptd, per_thread_data)
+        {
+          ctx = ptd->hmac_ctx[idx];
+          HMAC_CTX_free (ctx);
+          ptd->hmac_ctx[idx] = NULL;
+        }
+    }
+  return NULL;
+}
+
+static void
+crypto_openssl_key_handler (vlib_main_t *vm, vnet_crypto_key_op_t kop,
+                            vnet_crypto_key_index_t idx)
+{
+  vnet_crypto_key_t *key = vnet_crypto_get_key (idx);
+  crypto_openssl_main_t *cm = &crypto_openssl_main;
+
+  /** TODO: add linked alg support **/
+  if (key->type == VNET_CRYPTO_KEY_TYPE_LINK)
+    return;
+
+  if (cm->ctx_fn[key->alg] == 0)
+    return;
+
+  cm->ctx_fn[key->alg](key, kop, idx);
+}
+
 #define _(m, a, b, iv)                                                        \
   static u32 openssl_ops_enc_##a (vlib_main_t *vm, vnet_crypto_op_t *ops[],   \
                                   u32 n_ops)                                  \
@@ -437,6 +587,16 @@ openssl_ops_hmac (vlib_main_t * vm, vnet_crypto_op_t * ops[],
                                   u32 n_ops)                                  \
   {                                                                           \
     return openssl_ops_dec_##m (vm, ops, chunks, n_ops, b (), iv);            \
+  }                                                                           \
+  static void *openssl_ctx_##a (vnet_crypto_key_t *key,                       \
+                                vnet_crypto_key_op_t kop,                     \
+                                vnet_crypto_key_index_t idx)                  \
+  {                                                                           \
+    int is_gcm = ((VNET_CRYPTO_ALG_AES_128_GCM <= key->alg) &&                \
+                  (VNET_CRYPTO_ALG_AES_256_NULL_GMAC >= key->alg)) ?          \
+                   1 :                                                        \
+                   0;                                                         \
+    return openssl_ctx_cipher (key, kop, idx, b (), is_gcm);                  \
   }

 foreach_openssl_evp_op;
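The generated openssl_ctx_##a helpers decide is_gcm with a range test, which assumes the vnet_crypto_alg_t enumeration keeps all GCM and NULL-GMAC identifiers in one contiguous run from VNET_CRYPTO_ALG_AES_128_GCM up to VNET_CRYPTO_ALG_AES_256_NULL_GMAC. Illustrative layout only (the real enum is generated from the foreach macro lists, not written out like this):

    enum fake_crypto_alg /* hypothetical, for illustration */
    {
      ALG_AES_128_CBC,
      ALG_AES_128_GCM,       /* contiguous GCM/GMAC run starts here */
      ALG_AES_192_GCM,
      ALG_AES_256_GCM,
      ALG_AES_128_NULL_GMAC,
      ALG_AES_192_NULL_GMAC,
      ALG_AES_256_NULL_GMAC, /* ...and ends here */
      ALG_AES_128_CTR,       /* anything outside the run is "not gcm" */
    };

    #define ALG_IS_GCM(alg) \
      ((alg) >= ALG_AES_128_GCM && (alg) <= ALG_AES_256_NULL_GMAC)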
@@ -458,32 +618,43 @@ foreach_openssl_evp_op;
 foreach_openssl_hash_op;
 #undef _

-#define _(a, b) \
-static u32 \
-openssl_ops_hmac_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
-{ return openssl_ops_hmac (vm, ops, 0, n_ops, b ()); } \
-static u32 \
-openssl_ops_hmac_chained_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
-    vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
-{ return openssl_ops_hmac (vm, ops, chunks, n_ops, b ()); } \
+#define _(a, b)                                                               \
+  static u32 openssl_ops_hmac_##a (vlib_main_t *vm, vnet_crypto_op_t *ops[],  \
+                                   u32 n_ops)                                 \
+  {                                                                           \
+    return openssl_ops_hmac (vm, ops, 0, n_ops, b ());                        \
+  }                                                                           \
+  static u32 openssl_ops_hmac_chained_##a (                                   \
+    vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, \
+    u32 n_ops)                                                                \
+  {                                                                           \
+    return openssl_ops_hmac (vm, ops, chunks, n_ops, b ());                   \
+  }                                                                           \
+  static void *openssl_ctx_hmac_##a (vnet_crypto_key_t *key,                  \
+                                     vnet_crypto_key_op_t kop,                \
+                                     vnet_crypto_key_index_t idx)             \
+  {                                                                           \
+    return openssl_ctx_hmac (key, kop, idx, b ());                            \
+  }

 foreach_openssl_hmac_op;
 #undef _

-
 clib_error_t *
 crypto_openssl_init (vlib_main_t * vm)
 {
+  crypto_openssl_main_t *cm = &crypto_openssl_main;
   vlib_thread_main_t *tm = vlib_get_thread_main ();
   openssl_per_thread_data_t *ptd;
   u8 seed[32];

-  if (getrandom (&seed, sizeof (seed), 0) != sizeof (seed))
+  if (syscall (SYS_getrandom, &seed, sizeof (seed), 0) != sizeof (seed))
     return clib_error_return_unix (0, "getrandom() failed");

   RAND_seed (seed, sizeof (seed));

   u32 eidx = vnet_crypto_register_engine (vm, "openssl", 50, "OpenSSL");
+  cm->crypto_engine_index = eidx;

 #define _(m, a, b, iv)                                                        \
   vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_ENC,      \
@@ -491,15 +662,17 @@ crypto_openssl_init (vlib_main_t * vm)
                                      openssl_ops_enc_chained_##a);            \
   vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_DEC,      \
                                      openssl_ops_dec_##a,                     \
-                                     openssl_ops_dec_chained_##a);
+                                     openssl_ops_dec_chained_##a);            \
+  cm->ctx_fn[VNET_CRYPTO_ALG_##a] = openssl_ctx_##a;

   foreach_openssl_evp_op;
 #undef _

-#define _(a, b) \
-  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_HMAC, \
-                                     openssl_ops_hmac_##a, \
-                                     openssl_ops_hmac_chained_##a); \
+#define _(a, b)                                                               \
+  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_HMAC,     \
+                                     openssl_ops_hmac_##a,                    \
+                                     openssl_ops_hmac_chained_##a);           \
+  cm->ctx_fn[VNET_CRYPTO_ALG_HMAC_##a] = openssl_ctx_hmac_##a;

   foreach_openssl_hmac_op;
 #undef _
@@ -517,33 +690,25 @@ crypto_openssl_init (vlib_main_t * vm)

   vec_foreach (ptd, per_thread_data)
     {
-      ptd->evp_cipher_ctx = EVP_CIPHER_CTX_new ();
 #if OPENSSL_VERSION_NUMBER >= 0x10100000L
-      ptd->hmac_ctx = HMAC_CTX_new ();
       ptd->hash_ctx = EVP_MD_CTX_create ();
-#else
-      HMAC_CTX_init (&(ptd->_hmac_ctx));
-      ptd->hmac_ctx = &ptd->_hmac_ctx;
 #endif
     }
-
+  vnet_crypto_register_key_handler (vm, cm->crypto_engine_index,
+                                    crypto_openssl_key_handler);
   return 0;
 }

-/* *INDENT-OFF* */
 VLIB_INIT_FUNCTION (crypto_openssl_init) =
 {
   .runs_after = VLIB_INITS ("vnet_crypto_init"),
 };
-/* *INDENT-ON* */

-/* *INDENT-OFF* */
 VLIB_PLUGIN_REGISTER () = {
   .version = VPP_BUILD_VER,
   .description = "OpenSSL Crypto Engine",
 };
-/* *INDENT-ON* */

 /*
  * fd.io coding-style-patch-verification: ON
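Closing note on crypto_openssl_init: the RNG seed is now read with a raw syscall (SYS_getrandom) instead of the getrandom() wrapper, which keeps the plugin building against libc versions that lack the wrapper (it appeared in glibc 2.25), and the new key handler is registered so the per-key contexts above are created, re-keyed, and freed as keys change. Standalone sketch of the seeding step:

    #include <unistd.h>
    #include <sys/syscall.h>
    #include <openssl/rand.h>

    static int
    seed_openssl_rng (void)
    {
      unsigned char seed[32];
      /* raw syscall: no dependency on the getrandom() libc wrapper */
      if (syscall (SYS_getrandom, seed, sizeof (seed), 0) != sizeof (seed))
        return -1;
      RAND_seed (seed, sizeof (seed));
      return 0;
    }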