diff --git a/src/vnet/crypto/crypto.h b/src/vnet/crypto/crypto.h
index 07a73f14d04..a34d3c668d8 100644
--- a/src/vnet/crypto/crypto.h
+++ b/src/vnet/crypto/crypto.h
@@ -19,6 +19,7 @@
 #include <vlib/vlib.h>
 
 #define VNET_CRYPTO_FRAME_SIZE 64
+#define VNET_CRYPTO_FRAME_POOL_SIZE 1024
 
 /* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
 #define foreach_crypto_cipher_alg \
@@ -32,11 +33,21 @@
 _(AES_256_CTR, "aes-256-ctr", 32)
 
 /* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
-#define foreach_crypto_aead_alg \
-  _(AES_128_GCM, "aes-128-gcm", 16) \
-  _(AES_192_GCM, "aes-192-gcm", 24) \
-  _(AES_256_GCM, "aes-256-gcm", 32) \
-  _(CHACHA20_POLY1305, "chacha20-poly1305", 32)
+#define foreach_crypto_aead_alg \
+  _ (AES_128_GCM, "aes-128-gcm", 16) \
+  _ (AES_192_GCM, "aes-192-gcm", 24) \
+  _ (AES_256_GCM, "aes-256-gcm", 32) \
+  _ (AES_128_NULL_GMAC, "aes-128-null-gmac", 16) \
+  _ (AES_192_NULL_GMAC, "aes-192-null-gmac", 24) \
+  _ (AES_256_NULL_GMAC, "aes-256-null-gmac", 32) \
+  _ (CHACHA20_POLY1305, "chacha20-poly1305", 32)
+
+#define foreach_crypto_hash_alg \
+  _ (SHA1, "sha-1") \
+  _ (SHA224, "sha-224") \
+  _ (SHA256, "sha-256") \
+  _ (SHA384, "sha-384") \
+  _ (SHA512, "sha-512")
 
 #define foreach_crypto_hmac_alg \
 _(MD5, "md5") \
@@ -46,12 +57,13 @@
 _(SHA384, "sha-384") \
 _(SHA512, "sha-512")
 
-#define foreach_crypto_op_type \
- _(ENCRYPT, "encrypt") \
- _(DECRYPT, "decrypt") \
- _(AEAD_ENCRYPT, "aead-encrypt") \
- _(AEAD_DECRYPT, "aead-decrypt") \
- _(HMAC, "hmac")
+#define foreach_crypto_op_type \
+  _ (ENCRYPT, "encrypt") \
+  _ (DECRYPT, "decrypt") \
+  _ (AEAD_ENCRYPT, "aead-encrypt") \
+  _ (AEAD_DECRYPT, "aead-decrypt") \
+  _ (HMAC, "hmac") \
+  _ (HASH, "hash")
 
 typedef enum
 {
@@ -73,33 +85,52 @@ typedef enum
 
 /** async crypto **/
 
 /* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, TAG_LEN, AAD_LEN */
-#define foreach_crypto_aead_async_alg \
- _(AES_128_GCM, "aes-128-gcm-aad8", 16, 16, 8) \
- _(AES_128_GCM, "aes-128-gcm-aad12", 16, 16, 12) \
- _(AES_192_GCM, "aes-192-gcm-aad8", 24, 16, 8) \
- _(AES_192_GCM, "aes-192-gcm-aad12", 24, 16, 12) \
- _(AES_256_GCM, "aes-256-gcm-aad8", 32, 16, 8) \
- _(AES_256_GCM, "aes-256-gcm-aad12", 32, 16, 12) \
- _(CHACHA20_POLY1305, "chacha20-poly1305-aad8", 32, 16, 8) \
- _(CHACHA20_POLY1305, "chacha20-poly1305-aad12", 32, 16, 12)
+#define foreach_crypto_aead_async_alg \
+  _ (AES_128_GCM, "aes-128-gcm-aad8", 16, 16, 8) \
+  _ (AES_128_GCM, "aes-128-gcm-aad12", 16, 16, 12) \
+  _ (AES_192_GCM, "aes-192-gcm-aad8", 24, 16, 8) \
+  _ (AES_192_GCM, "aes-192-gcm-aad12", 24, 16, 12) \
+  _ (AES_256_GCM, "aes-256-gcm-aad8", 32, 16, 8) \
+  _ (AES_256_GCM, "aes-256-gcm-aad12", 32, 16, 12) \
+  _ (AES_128_NULL_GMAC, "aes-128-null-gmac-aad8", 16, 16, 8) \
+  _ (AES_128_NULL_GMAC, "aes-128-null-gmac-aad12", 16, 16, 12) \
+  _ (AES_192_NULL_GMAC, "aes-192-null-gmac-aad8", 24, 16, 8) \
+  _ (AES_192_NULL_GMAC, "aes-192-null-gmac-aad12", 24, 16, 12) \
+  _ (AES_256_NULL_GMAC, "aes-256-null-gmac-aad8", 32, 16, 8) \
+  _ (AES_256_NULL_GMAC, "aes-256-null-gmac-aad12", 32, 16, 12) \
+  _ (CHACHA20_POLY1305, "chacha20-poly1305-aad8", 32, 16, 8) \
+  _ (CHACHA20_POLY1305, "chacha20-poly1305-aad12", 32, 16, 12) \
+  _ (CHACHA20_POLY1305, "chacha20-poly1305", 32, 16, 0)
 
 /* CRYPTO_ID, INTEG_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, DIGEST_LEN */
-#define foreach_crypto_link_async_alg \
- _ (AES_128_CBC, SHA1, "aes-128-cbc-hmac-sha-1", 16, 12) \
- _ (AES_192_CBC, SHA1, "aes-192-cbc-hmac-sha-1", 24, 12) \
- _ (AES_256_CBC, SHA1, "aes-256-cbc-hmac-sha-1", 32, 12) \
- _ (AES_128_CBC, SHA224, "aes-128-cbc-hmac-sha-224", 16, 14) \
- _ (AES_192_CBC, SHA224, "aes-192-cbc-hmac-sha-224", 24, 14) \
- _ (AES_256_CBC, SHA224, "aes-256-cbc-hmac-sha-224", 32, 14) \
- _ (AES_128_CBC, SHA256, "aes-128-cbc-hmac-sha-256", 16, 16) \
- _ (AES_192_CBC, SHA256, "aes-192-cbc-hmac-sha-256", 24, 16) \
- _ (AES_256_CBC, SHA256, "aes-256-cbc-hmac-sha-256", 32, 16) \
- _ (AES_128_CBC, SHA384, "aes-128-cbc-hmac-sha-384", 16, 24) \
- _ (AES_192_CBC, SHA384, "aes-192-cbc-hmac-sha-384", 24, 24) \
- _ (AES_256_CBC, SHA384, "aes-256-cbc-hmac-sha-384", 32, 24) \
- _ (AES_128_CBC, SHA512, "aes-128-cbc-hmac-sha-512", 16, 32) \
- _ (AES_192_CBC, SHA512, "aes-192-cbc-hmac-sha-512", 24, 32) \
- _ (AES_256_CBC, SHA512, "aes-256-cbc-hmac-sha-512", 32, 32)
+#define foreach_crypto_link_async_alg \
+  _ (3DES_CBC, MD5, "3des-cbc-hmac-md5", 24, 12) \
+  _ (AES_128_CBC, MD5, "aes-128-cbc-hmac-md5", 16, 12) \
+  _ (AES_192_CBC, MD5, "aes-192-cbc-hmac-md5", 24, 12) \
+  _ (AES_256_CBC, MD5, "aes-256-cbc-hmac-md5", 32, 12) \
+  _ (3DES_CBC, SHA1, "3des-cbc-hmac-sha-1", 24, 12) \
+  _ (AES_128_CBC, SHA1, "aes-128-cbc-hmac-sha-1", 16, 12) \
+  _ (AES_192_CBC, SHA1, "aes-192-cbc-hmac-sha-1", 24, 12) \
+  _ (AES_256_CBC, SHA1, "aes-256-cbc-hmac-sha-1", 32, 12) \
+  _ (3DES_CBC, SHA224, "3des-cbc-hmac-sha-224", 24, 14) \
+  _ (AES_128_CBC, SHA224, "aes-128-cbc-hmac-sha-224", 16, 14) \
+  _ (AES_192_CBC, SHA224, "aes-192-cbc-hmac-sha-224", 24, 14) \
+  _ (AES_256_CBC, SHA224, "aes-256-cbc-hmac-sha-224", 32, 14) \
+  _ (3DES_CBC, SHA256, "3des-cbc-hmac-sha-256", 24, 16) \
+  _ (AES_128_CBC, SHA256, "aes-128-cbc-hmac-sha-256", 16, 16) \
+  _ (AES_192_CBC, SHA256, "aes-192-cbc-hmac-sha-256", 24, 16) \
+  _ (AES_256_CBC, SHA256, "aes-256-cbc-hmac-sha-256", 32, 16) \
+  _ (3DES_CBC, SHA384, "3des-cbc-hmac-sha-384", 24, 24) \
+  _ (AES_128_CBC, SHA384, "aes-128-cbc-hmac-sha-384", 16, 24) \
+  _ (AES_192_CBC, SHA384, "aes-192-cbc-hmac-sha-384", 24, 24) \
+  _ (AES_256_CBC, SHA384, "aes-256-cbc-hmac-sha-384", 32, 24) \
+  _ (3DES_CBC, SHA512, "3des-cbc-hmac-sha-512", 24, 32) \
+  _ (AES_128_CBC, SHA512, "aes-128-cbc-hmac-sha-512", 16, 32) \
+  _ (AES_192_CBC, SHA512, "aes-192-cbc-hmac-sha-512", 24, 32) \
+  _ (AES_256_CBC, SHA512, "aes-256-cbc-hmac-sha-512", 32, 32) \
+  _ (AES_128_CTR, SHA1, "aes-128-ctr-hmac-sha-1", 16, 12) \
+  _ (AES_192_CTR, SHA1, "aes-192-ctr-hmac-sha-1", 24, 12) \
+  _ (AES_256_CTR, SHA1, "aes-256-ctr-hmac-sha-1", 32, 12)
 
 #define foreach_crypto_async_op_type \
 _(ENCRYPT, "async-encrypt") \
 _(DECRYPT, "async-decrypt")
@@ -125,13 +156,15 @@ typedef enum
 {
   VNET_CRYPTO_ALG_NONE = 0,
 #define _(n, s, l) VNET_CRYPTO_ALG_##n,
-  foreach_crypto_cipher_alg
-  foreach_crypto_aead_alg
+  foreach_crypto_cipher_alg foreach_crypto_aead_alg
 #undef _
 #define _(n, s) VNET_CRYPTO_ALG_HMAC_##n,
-  foreach_crypto_hmac_alg
+  foreach_crypto_hmac_alg
 #undef _
-  VNET_CRYPTO_N_ALGS,
+#define _(n, s) VNET_CRYPTO_ALG_HASH_##n,
+  foreach_crypto_hash_alg
+#undef _
+  VNET_CRYPTO_N_ALGS,
 } vnet_crypto_alg_t;
 
 typedef enum
@@ -197,13 +230,15 @@ typedef enum
 {
   VNET_CRYPTO_OP_NONE = 0,
 #define _(n, s, l) VNET_CRYPTO_OP_##n##_ENC, VNET_CRYPTO_OP_##n##_DEC,
-  foreach_crypto_cipher_alg
-  foreach_crypto_aead_alg
+  foreach_crypto_cipher_alg foreach_crypto_aead_alg
 #undef _
 #define _(n, s) VNET_CRYPTO_OP_##n##_HMAC,
-  foreach_crypto_hmac_alg
+  foreach_crypto_hmac_alg
+#undef _
+#define _(n, s) VNET_CRYPTO_OP_##n##_HASH,
+  foreach_crypto_hash_alg
 #undef _
-  VNET_CRYPTO_N_OP_IDS,
+  VNET_CRYPTO_N_OP_IDS,
 } vnet_crypto_op_id_t;
 /* *INDENT-ON* */
 
@@ -234,9 +269,8 @@ typedef struct
   vnet_crypto_op_id_t op:16;
   vnet_crypto_op_status_t status:8;
   u8 flags;
-#define VNET_CRYPTO_OP_FLAG_INIT_IV (1 << 0)
-#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK (1 << 1)
-#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS (1 << 2)
+#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK (1 << 0)
+#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS (1 << 1)
 
   union
   {
@@ -300,13 +334,6 @@ typedef struct
 
 typedef struct
 {
-  vnet_crypto_op_status_t status:8;
-  u32 key_index;
-  i16 crypto_start_offset;	/* first buffer offset */
-  i16 integ_start_offset;
-  u32 crypto_total_length;
-  /* adj total_length for integ, e.g.4 bytes for IPSec ESN */
-  u16 integ_length_adj;
   u8 *iv;
   union
   {
@@ -314,18 +341,33 @@ typedef struct
     u8 *tag;
   };
   u8 *aad;
+  u32 key_index;
+  u32 crypto_total_length;
+  i16 crypto_start_offset; /* first buffer offset */
+  i16 integ_start_offset;
+  /* adj total_length for integ, e.g. 4 bytes for IPSec ESN */
+  i16 integ_length_adj;
+  vnet_crypto_op_status_t status : 8;
   u8 flags; /**< share same VNET_CRYPTO_OP_FLAG_* values */
 } vnet_crypto_async_frame_elt_t;
 
+/* Assert the size so the compiler will warn us when it changes */
+STATIC_ASSERT_SIZEOF (vnet_crypto_async_frame_elt_t, 5 * sizeof (u64));
+
+typedef enum vnet_crypto_async_frame_state_t_
+{
+  VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED,
+  /* frame waiting to be processed */
+  VNET_CRYPTO_FRAME_STATE_PENDING,
+  VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS,
+  VNET_CRYPTO_FRAME_STATE_SUCCESS,
+  VNET_CRYPTO_FRAME_STATE_ELT_ERROR
+} __clib_packed vnet_crypto_async_frame_state_t;
+
 typedef struct
 {
   CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
-#define VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED 0
-#define VNET_CRYPTO_FRAME_STATE_PENDING 1	/* frame waiting to be processed */
-#define VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS 2
-#define VNET_CRYPTO_FRAME_STATE_SUCCESS 3
-#define VNET_CRYPTO_FRAME_STATE_ELT_ERROR 4
-  u8 state;
+  vnet_crypto_async_frame_state_t state;
   vnet_crypto_async_op_id_t op:8;
   u16 n_elts;
   vnet_crypto_async_frame_elt_t elts[VNET_CRYPTO_FRAME_SIZE];
@@ -337,9 +379,8 @@ typedef struct
 typedef struct
 {
   CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
-  vnet_crypto_async_frame_t *frames[VNET_CRYPTO_ASYNC_OP_N_IDS];
   vnet_crypto_async_frame_t *frame_pool;
-  u32 *buffer_indice;
+  u32 *buffer_indices;
   u16 *nexts;
 } vnet_crypto_thread_t;
 
@@ -390,12 +431,15 @@ void vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
 
 /** async crypto register functions */
 u32 vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name);
-void vnet_crypto_register_async_handler (vlib_main_t * vm,
-					 u32 engine_index,
-					 vnet_crypto_async_op_id_t opt,
-					 vnet_crypto_frame_enqueue_t * enq_fn,
-					 vnet_crypto_frame_dequeue_t *
-					 deq_fn);
+
+void
+vnet_crypto_register_enqueue_handler (vlib_main_t *vm, u32 engine_index,
+				      vnet_crypto_async_op_id_t opt,
+				      vnet_crypto_frame_enqueue_t *enq_fn);
+
+void
+vnet_crypto_register_dequeue_handler (vlib_main_t *vm, u32 engine_index,
+				      vnet_crypto_frame_dequeue_t *deq_fn);
 
 typedef struct
 {
@@ -407,7 +451,7 @@ typedef struct
   vnet_crypto_chained_ops_handler_t
     * chained_ops_handlers[VNET_CRYPTO_N_OP_IDS];
   vnet_crypto_frame_enqueue_t *enqueue_handlers[VNET_CRYPTO_ASYNC_OP_N_IDS];
-  vnet_crypto_frame_dequeue_t *dequeue_handlers[VNET_CRYPTO_ASYNC_OP_N_IDS];
+  vnet_crypto_frame_dequeue_t *dequeue_handler;
 } vnet_crypto_engine_t;
 
 typedef struct
 {
@@ -424,7 +468,6 @@ typedef struct
   vnet_crypto_chained_ops_handler_t **chained_ops_handlers;
   vnet_crypto_frame_enqueue_t **enqueue_handlers;
   vnet_crypto_frame_dequeue_t **dequeue_handlers;
-  clib_bitmap_t *async_active_ids;
   vnet_crypto_op_data_t opt_data[VNET_CRYPTO_N_OP_IDS];
   vnet_crypto_async_op_data_t async_opt_data[VNET_CRYPTO_ASYNC_OP_N_IDS];
   vnet_crypto_engine_t *engines;
@@ -433,12 +476,8 @@ typedef struct
   uword *alg_index_by_name;
   uword *async_alg_index_by_name;
   vnet_crypto_async_alg_data_t *async_algs;
-  u32 async_refcnt;
   vnet_crypto_async_next_node_t *next_nodes;
   u32 crypto_node_index;
-#define VNET_CRYPTO_ASYNC_DISPATCH_POLLING 0
-#define VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT 1
-  u8 dispatch_mode;
 } vnet_crypto_main_t;
 
 extern vnet_crypto_main_t crypto_main;
@@ -466,21 +505,13 @@ u32 vnet_crypto_key_add_linked (vlib_main_t * vm,
 				vnet_crypto_key_index_t index_crypto,
 				vnet_crypto_key_index_t index_integ);
 
-clib_error_t *crypto_dispatch_enable_disable (int is_enable);
-
 int vnet_crypto_set_async_handler2 (char *alg_name, char *engine);
 
 int vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t opt);
 
-void vnet_crypto_request_async_mode (int is_enable);
-
-void vnet_crypto_set_async_dispatch_mode (u8 mode);
-
 vnet_crypto_async_alg_t vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
 					       vnet_crypto_alg_t integ_alg);
 
-clib_error_t *crypto_dispatch_enable_disable (int is_enable);
-
 format_function_t format_vnet_crypto_alg;
 format_function_t format_vnet_crypto_engine;
 format_function_t format_vnet_crypto_op;
@@ -532,18 +563,19 @@ vnet_crypto_async_get_frame (vlib_main_t * vm, vnet_crypto_async_op_id_t opt)
 {
   vnet_crypto_main_t *cm = &crypto_main;
   vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
-  vnet_crypto_async_frame_t *f = ct->frames[opt];
+  vnet_crypto_async_frame_t *f = NULL;
 
-  if (!f)
+  if (PREDICT_TRUE (pool_free_elts (ct->frame_pool)))
     {
       pool_get_aligned (ct->frame_pool, f, CLIB_CACHE_LINE_BYTES);
-      if (CLIB_DEBUG > 0)
-	clib_memset (f, 0xfe, sizeof (*f));
+#if CLIB_DEBUG > 0
+      clib_memset (f, 0xfe, sizeof (*f));
+#endif
       f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
       f->op = opt;
       f->n_elts = 0;
-      ct->frames[opt] = f;
     }
+
   return f;
 }
 
@@ -562,62 +594,50 @@ vnet_crypto_async_submit_open_frame (vlib_main_t * vm,
 {
   vnet_crypto_main_t *cm = &crypto_main;
   vlib_thread_main_t *tm = vlib_get_thread_main ();
-  vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
-  vnet_crypto_async_op_id_t opt = frame->op;
-  u32 i = vlib_num_workers () > 0;
+  u32 i;
+  vlib_node_t *n;
 
-  int ret = (cm->enqueue_handlers[frame->op]) (vm, frame);
+  frame->state = VNET_CRYPTO_FRAME_STATE_PENDING;
   frame->enqueue_thread_index = vm->thread_index;
-  clib_bitmap_set_no_check (cm->async_active_ids, opt, 1);
-  if (PREDICT_TRUE (ret == 0))
+
+  if (PREDICT_FALSE (cm->enqueue_handlers == NULL))
     {
-      vnet_crypto_async_frame_t *nf = 0;
-      frame->state = VNET_CRYPTO_FRAME_STATE_PENDING;
-      pool_get_aligned (ct->frame_pool, nf, CLIB_CACHE_LINE_BYTES);
-      if (CLIB_DEBUG > 0)
-	clib_memset (nf, 0xfe, sizeof (*nf));
-      nf->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
-      nf->op = opt;
-      nf->n_elts = 0;
-      ct->frames[opt] = nf;
+      frame->state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
+      return -1;
     }
 
-  if (cm->dispatch_mode == VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT)
+  int ret = (cm->enqueue_handlers[frame->op]) (vm, frame);
+
+  if (PREDICT_TRUE (ret == 0))
     {
-      for (; i < tm->n_vlib_mains; i++)
+      n = vlib_get_node (vm, cm->crypto_node_index);
+      if (n->state == VLIB_NODE_STATE_INTERRUPT)
 	{
-	  vlib_node_set_interrupt_pending (vlib_mains[i],
-					   cm->crypto_node_index);
+	  for (i = 0; i < tm->n_vlib_mains; i++)
+	    vlib_node_set_interrupt_pending (vlib_get_main_by_index (i),
+					     cm->crypto_node_index);
 	}
     }
+  else
+    {
+      frame->state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
+    }
+
   return ret;
 }
 
-static_always_inline int
-vnet_crypto_async_add_to_frame (vlib_main_t * vm,
-				vnet_crypto_async_frame_t ** frame,
-				u32 key_index,
-				u32 crypto_len, i16 integ_len_adj,
-				i16 crypto_start_offset,
-				u16 integ_start_offset,
-				u32 buffer_index,
-				u16 next_node,
-				u8 * iv, u8 * tag, u8 * aad, u8 flags)
-{
-  vnet_crypto_async_frame_t *f = *frame;
+static_always_inline void
+vnet_crypto_async_add_to_frame (vlib_main_t *vm, vnet_crypto_async_frame_t *f,
+				u32 key_index, u32 crypto_len,
+				i16 integ_len_adj, i16 crypto_start_offset,
+				i16 integ_start_offset, u32 buffer_index,
+				u16 next_node, u8 *iv, u8 *tag, u8 *aad,
+				u8 flags)
+{
   vnet_crypto_async_frame_elt_t *fe;
   u16 index;
 
-  if (PREDICT_FALSE (f->n_elts == VNET_CRYPTO_FRAME_SIZE))
-    {
-      vnet_crypto_async_op_id_t opt = f->op;
-      int ret;
-      ret = vnet_crypto_async_submit_open_frame (vm, f);
-      if (PREDICT_FALSE (ret < 0))
-	return -1;
-      f = vnet_crypto_async_get_frame (vm, opt);
-      *frame = f;
-    }
+  ASSERT (f->n_elts < VNET_CRYPTO_FRAME_SIZE);
 
   index = f->n_elts;
   fe = &f->elts[index];
@@ -633,8 +653,6 @@ vnet_crypto_async_add_to_frame (vlib_main_t * vm,
   fe->flags = flags;
   f->buffer_indices[index] = buffer_index;
   f->next_node_index[index] = next_node;
-
-  return 0;
 }
 
 static_always_inline void
@@ -642,7 +660,8 @@ vnet_crypto_async_reset_frame (vnet_crypto_async_frame_t * f)
 {
   vnet_crypto_async_op_id_t opt;
   ASSERT (f != 0);
-  ASSERT (f->state == VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED);
+  ASSERT ((f->state == VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED
+	   || f->state == VNET_CRYPTO_FRAME_STATE_ELT_ERROR));
  opt = f->op;
   if (CLIB_DEBUG > 0)
     clib_memset (f, 0xfe, sizeof (*f));
@@ -651,6 +670,12 @@ vnet_crypto_async_reset_frame (vnet_crypto_async_frame_t * f)
   f->n_elts = 0;
 }
 
+static_always_inline u8
+vnet_crypto_async_frame_is_full (const vnet_crypto_async_frame_t *f)
+{
+  return (f->n_elts == VNET_CRYPTO_FRAME_SIZE);
+}
+
 #endif /* included_vnet_crypto_crypto_h */
 
 /*
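
Note for engine authors: the patch replaces the combined vnet_crypto_register_async_handler (one enqueue/dequeue pair per async op) with vnet_crypto_register_enqueue_handler, still registered per op id, and vnet_crypto_register_dequeue_handler, now registered once per engine (the dequeue_handler field in vnet_crypto_engine_t is no longer an array). A minimal registration sketch follows; the engine name, priority, and handler names are illustrative, not part of the patch, and the dequeue handler body is sketched separately below.

#include <vnet/crypto/crypto.h>

/* Hypothetical engine sketch: "my-engine", my_enqueue_aes128gcm_enc and
 * my_dequeue are stand-in names, not part of the patch. */
static int
my_enqueue_aes128gcm_enc (vlib_main_t *vm, vnet_crypto_async_frame_t *frame)
{
  /* hand the whole frame to the device here; return 0 on success */
  return 0;
}

static vnet_crypto_async_frame_t *my_dequeue (vlib_main_t *vm,
					      u32 *nb_elts_processed,
					      u32 *enqueue_thread_idx);

static clib_error_t *
my_engine_init (vlib_main_t *vm)
{
  u32 eidx =
    vnet_crypto_register_engine (vm, "my-engine", 100, "example engine");

  /* enqueue handlers remain per async op id... */
  vnet_crypto_register_enqueue_handler (
    vm, eidx, VNET_CRYPTO_OP_AES_128_GCM_TAG16_AAD12_ENC,
    my_enqueue_aes128gcm_enc);
  /* ...one call per supported op id... */

  /* ...but a single dequeue handler now serves the whole engine */
  vnet_crypto_register_dequeue_handler (vm, eidx, my_dequeue);
  return 0;
}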
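
The frame state also changes from loose #defines on a u8 to the packed vnet_crypto_async_frame_state_t enum, and a completed frame is handed back through the engine's single dequeue handler. A sketch of the expected handler shape under these assumptions; my_pop_completed is a hypothetical engine-internal helper:

static vnet_crypto_async_frame_t *
my_dequeue (vlib_main_t *vm, u32 *nb_elts_processed, u32 *enqueue_thread_idx)
{
  /* my_pop_completed () is hypothetical: pop a device-completed frame */
  vnet_crypto_async_frame_t *f = my_pop_completed ();
  u16 i;

  if (f == 0)
    return 0;

  /* per-element status is already filled in; derive the frame state */
  f->state = VNET_CRYPTO_FRAME_STATE_SUCCESS;
  for (i = 0; i < f->n_elts; i++)
    if (f->elts[i].status != VNET_CRYPTO_OP_STATUS_COMPLETED)
      f->state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;

  *nb_elts_processed = f->n_elts;
  *enqueue_thread_idx = f->enqueue_thread_index;
  return f;
}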
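
On the caller side, vnet_crypto_async_add_to_frame no longer auto-submits a full frame (it now returns void and ASSERTs there is room), and vnet_crypto_async_get_frame can return NULL once the fixed per-thread frame pool is exhausted, so callers drive submission themselves via the new vnet_crypto_async_frame_is_full helper. A caller-side sketch only: opt, n_left, bi, key_index, and the offset/length variables are placeholders for per-packet state gathered by the caller.

  vnet_crypto_async_frame_t *f = vnet_crypto_async_get_frame (vm, opt);

  if (PREDICT_FALSE (f == 0))
    return; /* frame pool exhausted; drop or retry */

  while (n_left > 0)
    {
      if (vnet_crypto_async_frame_is_full (f))
	{
	  if (vnet_crypto_async_submit_open_frame (vm, f) < 0)
	    {
	      /* frame is now VNET_CRYPTO_FRAME_STATE_ELT_ERROR; note the
	       * relaxed assert above lets it be reset and reused */
	      vnet_crypto_async_reset_frame (f);
	      break;
	    }
	  if (PREDICT_FALSE ((f = vnet_crypto_async_get_frame (vm, opt)) == 0))
	    break;
	}
      vnet_crypto_async_add_to_frame (vm, f, key_index, crypto_len,
				      integ_len_adj, crypto_start_offset,
				      integ_start_offset, bi, next_node, iv,
				      tag, aad, flags);
      n_left--;
    }

  if (f && f->n_elts)
    vnet_crypto_async_submit_open_frame (vm, f);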