X-Git-Url: https://gerrit.fd.io/r/gitweb?a=blobdiff_plain;f=src%2Fvnet%2Fcrypto%2Fcrypto.c;h=3b1505ad4486d5b7ef891b3f816789d413be80ab;hb=06111a837;hp=1caff71b3e23321f84dc6a67fb97aeea9ba2ebbe;hpb=d26b8607c9f9a4385bf7af0a01b1a42efaed5053;p=vpp.git
diff --git a/src/vnet/crypto/crypto.c b/src/vnet/crypto/crypto.c
index 1caff71b3e2..3b1505ad448 100644
--- a/src/vnet/crypto/crypto.c
+++ b/src/vnet/crypto/crypto.c
@@ -61,7 +61,6 @@ vnet_crypto_process_ops_call_handler (vlib_main_t * vm,
   return rv;
 }
 
-
 static_always_inline u32
 vnet_crypto_process_ops_inline (vlib_main_t * vm, vnet_crypto_op_t ops[],
                                 vnet_crypto_op_chunk_t * chunks, u32 n_ops)
@@ -190,8 +189,17 @@ int
 vnet_crypto_is_set_handler (vnet_crypto_alg_t alg)
 {
   vnet_crypto_main_t *cm = &crypto_main;
+  vnet_crypto_op_id_t opt = 0;
+  int i;
+
+  if (alg >= vec_len (cm->algs))
+    return 0;
+
+  for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++)
+    if ((opt = cm->algs[alg].op_by_type[i]) != 0)
+      break;
 
-  return (alg < vec_len (cm->ops_handlers) && NULL != cm->ops_handlers[alg]);
+  return NULL != cm->ops_handlers[opt];
 }
 
 void
@@ -266,6 +274,44 @@ vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
     vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, cfn);
 }
 
+void
+vnet_crypto_register_async_handler (vlib_main_t * vm, u32 engine_index,
+                                    vnet_crypto_async_op_id_t opt,
+                                    vnet_crypto_frame_enqueue_t * enqueue_hdl,
+                                    vnet_crypto_frame_dequeue_t * dequeue_hdl)
+{
+  vnet_crypto_main_t *cm = &crypto_main;
+  vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
+  vnet_crypto_async_op_data_t *otd = cm->async_opt_data + opt;
+  vec_validate_aligned (cm->enqueue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS,
+                        CLIB_CACHE_LINE_BYTES);
+  vec_validate_aligned (cm->dequeue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS,
+                        CLIB_CACHE_LINE_BYTES);
+
+  /* both enqueue hdl and dequeue hdl should be present */
+  if (!enqueue_hdl && !dequeue_hdl)
+    return;
+
+  e->enqueue_handlers[opt] = enqueue_hdl;
+  e->dequeue_handlers[opt] = dequeue_hdl;
+  if (otd->active_engine_index_async == ~0)
+    {
+      otd->active_engine_index_async = engine_index;
+      cm->enqueue_handlers[opt] = enqueue_hdl;
+      cm->dequeue_handlers[opt] = dequeue_hdl;
+    }
+
+  ae = vec_elt_at_index (cm->engines, otd->active_engine_index_async);
+  if (ae->priority <= e->priority)
+    {
+      otd->active_engine_index_async = engine_index;
+      cm->enqueue_handlers[opt] = enqueue_hdl;
+      cm->dequeue_handlers[opt] = dequeue_hdl;
+    }
+
+  return;
+}
+
 void
 vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
                                   vnet_crypto_key_handler_t * key_handler)
@@ -297,7 +343,13 @@ vnet_crypto_key_len_check (vnet_crypto_alg_t alg, u16 length)
 #define _(n, s) \
       case VNET_CRYPTO_ALG_HMAC_##n: \
         return 1;
-      foreach_crypto_hmac_alg
+      foreach_crypto_hmac_alg
+#undef _
+
+#define _(n, s) \
+      case VNET_CRYPTO_ALG_HASH_##n: \
+        return 1;
+      foreach_crypto_hash_alg
 #undef _
     }
 
@@ -318,10 +370,10 @@ vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg, u8 * data,
 
   pool_get_zero (cm->keys, key);
   index = key - cm->keys;
+  key->type = VNET_CRYPTO_KEY_TYPE_DATA;
   key->alg = alg;
   vec_validate_aligned (key->data, length - 1, CLIB_CACHE_LINE_BYTES);
   clib_memcpy (key->data, data, length);
-
   /* *INDENT-OFF* */
   vec_foreach (engine, cm->engines)
     if (engine->key_op_handler)
@@ -343,11 +395,255 @@ vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index)
       engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_DEL, index);
   /* *INDENT-ON* */
 
-  clib_memset (key->data, 0, vec_len (key->data));
-  vec_free (key->data);
+  if (key->type == VNET_CRYPTO_KEY_TYPE_DATA)
+    {
+      clib_memset (key->data, 0, vec_len (key->data));
+      vec_free (key->data);
+    }
+  else if (key->type == VNET_CRYPTO_KEY_TYPE_LINK)
+    {
+      key->index_crypto = key->index_integ = 0;
+    }
+
   pool_put (cm->keys, key);
 }
 
+vnet_crypto_async_alg_t
+vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
+                       vnet_crypto_alg_t integ_alg)
+{
+#define _(c, h, s, k, d) \
+  if (crypto_alg == VNET_CRYPTO_ALG_##c && \
+      integ_alg == VNET_CRYPTO_ALG_HMAC_##h) \
+    return VNET_CRYPTO_ALG_##c##_##h##_TAG##d;
+  foreach_crypto_link_async_alg
+#undef _
+  return ~0;
+}
+
+u32
+vnet_crypto_key_add_linked (vlib_main_t * vm,
+                            vnet_crypto_key_index_t index_crypto,
+                            vnet_crypto_key_index_t index_integ)
+{
+  u32 index;
+  vnet_crypto_main_t *cm = &crypto_main;
+  vnet_crypto_engine_t *engine;
+  vnet_crypto_key_t *key_crypto, *key_integ, *key;
+  vnet_crypto_async_alg_t linked_alg;
+
+  key_crypto = pool_elt_at_index (cm->keys, index_crypto);
+  key_integ = pool_elt_at_index (cm->keys, index_integ);
+
+  linked_alg = vnet_crypto_link_algs (key_crypto->alg, key_integ->alg);
+  if (linked_alg == ~0)
+    return ~0;
+
+  pool_get_zero (cm->keys, key);
+  index = key - cm->keys;
+  key->type = VNET_CRYPTO_KEY_TYPE_LINK;
+  key->index_crypto = index_crypto;
+  key->index_integ = index_integ;
+  key->async_alg = linked_alg;
+
+  /* *INDENT-OFF* */
+  vec_foreach (engine, cm->engines)
+    if (engine->key_op_handler)
+      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
+  /* *INDENT-ON* */
+
+  return index;
+}
+
+clib_error_t *
+crypto_dispatch_enable_disable (int is_enable)
+{
+  vnet_crypto_main_t *cm = &crypto_main;
+  vlib_thread_main_t *tm = vlib_get_thread_main ();
+  u32 skip_master = vlib_num_workers () > 0, i;
+  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;
+  u8 state_change = 0;
+
+  CLIB_MEMORY_STORE_BARRIER ();
+  if (is_enable && cm->async_refcnt > 0)
+    {
+      state_change = 1;
+      state =
+        cm->dispatch_mode ==
+        VNET_CRYPTO_ASYNC_DISPATCH_POLLING ? VLIB_NODE_STATE_POLLING :
+        VLIB_NODE_STATE_INTERRUPT;
+    }
+
+  if (!is_enable && cm->async_refcnt == 0)
+    {
+      state_change = 1;
+      state = VLIB_NODE_STATE_DISABLED;
+    }
+
+  if (state_change)
+    for (i = skip_master; i < tm->n_vlib_mains; i++)
+      {
+        vlib_main_t *ovm = vlib_get_main_by_index (i);
+        if (state != vlib_node_get_state (ovm, cm->crypto_node_index))
+          vlib_node_set_state (ovm, cm->crypto_node_index, state);
+      }
+  return 0;
+}
+
+static_always_inline void
+crypto_set_active_async_engine (vnet_crypto_async_op_data_t * od,
+                                vnet_crypto_async_op_id_t id, u32 ei)
+{
+  vnet_crypto_main_t *cm = &crypto_main;
+  vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);
+
+  if (ce->enqueue_handlers[id] && ce->dequeue_handlers[id])
+    {
+      od->active_engine_index_async = ei;
+      cm->enqueue_handlers[id] = ce->enqueue_handlers[id];
+      cm->dequeue_handlers[id] = ce->dequeue_handlers[id];
+    }
+}
+
+int
+vnet_crypto_set_async_handler2 (char *alg_name, char *engine)
+{
+  uword *p;
+  vnet_crypto_main_t *cm = &crypto_main;
+  vnet_crypto_async_alg_data_t *ad;
+  int i;
+
+  p = hash_get_mem (cm->async_alg_index_by_name, alg_name);
+  if (!p)
+    return -1;
+
+  ad = vec_elt_at_index (cm->async_algs, p[0]);
+
+  p = hash_get_mem (cm->engine_index_by_name, engine);
+  if (!p)
+    return -1;
+
+  for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_TYPES; i++)
+    {
+      vnet_crypto_async_op_data_t *od;
+      vnet_crypto_async_op_id_t id = ad->op_by_type[i];
+      if (id == 0)
+        continue;
+
+      od = cm->async_opt_data + id;
+      crypto_set_active_async_engine (od, id, p[0]);
+    }
+
+  return 0;
+}
+
+u32
+vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name)
+{
+  vnet_crypto_main_t *cm = &crypto_main;
+  vnet_crypto_async_next_node_t *nn = 0;
+  vlib_node_t *cc, *pn;
+  uword index = vec_len (cm->next_nodes);
+
+  pn = vlib_get_node_by_name (vm, (u8 *) post_node_name);
+  if (!pn)
+    return ~0;
+
+  /* *INDENT-OFF* */
+  vec_foreach (nn, cm->next_nodes)
+  {
+    if (nn->node_idx == pn->index)
+      return nn->next_idx;
+  }
+  /* *INDENT-ON* */
+
+  vec_validate (cm->next_nodes, index);
+  nn = vec_elt_at_index (cm->next_nodes, index);
+
+  cc = vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch");
+  nn->next_idx = vlib_node_add_named_next (vm, cc->index, post_node_name);
+  nn->node_idx = pn->index;
+
+  return nn->next_idx;
+}
+
+void
+vnet_crypto_request_async_mode (int is_enable)
+{
+  vnet_crypto_main_t *cm = &crypto_main;
+  vlib_thread_main_t *tm = vlib_get_thread_main ();
+  u32 skip_master = vlib_num_workers () > 0, i;
+  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;
+  u8 state_change = 0;
+
+  CLIB_MEMORY_STORE_BARRIER ();
+  if (is_enable && cm->async_refcnt == 0)
+    {
+      state_change = 1;
+      state =
+        cm->dispatch_mode == VNET_CRYPTO_ASYNC_DISPATCH_POLLING ?
+        VLIB_NODE_STATE_POLLING : VLIB_NODE_STATE_INTERRUPT;
+    }
+  if (!is_enable && cm->async_refcnt == 1)
+    {
+      state_change = 1;
+      state = VLIB_NODE_STATE_DISABLED;
+    }
+
+  if (state_change)
+    for (i = skip_master; i < tm->n_vlib_mains; i++)
+      {
+        vlib_main_t *ovm = vlib_get_main_by_index (i);
+        if (state != vlib_node_get_state (ovm, cm->crypto_node_index))
+          vlib_node_set_state (ovm, cm->crypto_node_index, state);
+      }
+
+  if (is_enable)
+    cm->async_refcnt += 1;
+  else if (cm->async_refcnt > 0)
+    cm->async_refcnt -= 1;
+}
+
+void
+vnet_crypto_set_async_dispatch_mode (u8 mode)
+{
+  vnet_crypto_main_t *cm = &crypto_main;
+  u32 skip_master = vlib_num_workers () > 0, i;
+  vlib_thread_main_t *tm = vlib_get_thread_main ();
+  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;
+
+  CLIB_MEMORY_STORE_BARRIER ();
+  cm->dispatch_mode = mode;
+  if (mode == VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT)
+    {
+      state =
+        cm->async_refcnt == 0 ?
+        VLIB_NODE_STATE_DISABLED : VLIB_NODE_STATE_INTERRUPT;
+    }
+  else if (mode == VNET_CRYPTO_ASYNC_DISPATCH_POLLING)
+    {
+      state =
+        cm->async_refcnt == 0 ?
+        VLIB_NODE_STATE_DISABLED : VLIB_NODE_STATE_POLLING;
+    }
+
+  for (i = skip_master; i < tm->n_vlib_mains; i++)
+    {
+      vlib_main_t *ovm = vlib_get_main_by_index (i);
+      if (state != vlib_node_get_state (ovm, cm->crypto_node_index))
+        vlib_node_set_state (ovm, cm->crypto_node_index, state);
+    }
+}
+
+int
+vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t op)
+{
+  vnet_crypto_main_t *cm = &crypto_main;
+
+  return (op < vec_len (cm->enqueue_handlers) &&
+          NULL != cm->enqueue_handlers[op]);
+}
+
 static void
 vnet_crypto_init_cipher_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t eid,
                               vnet_crypto_op_id_t did, char *name, u8 is_aead)
@@ -378,6 +674,20 @@ vnet_crypto_init_cipher_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t eid,
   hash_set_mem (cm->alg_index_by_name, name, alg);
 }
 
+static void
+vnet_crypto_init_hash_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t id,
+                            char *name)
+{
+  vnet_crypto_main_t *cm = &crypto_main;
+  cm->algs[alg].name = name;
+  cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_HASH] = id;
+  cm->opt_data[id].alg = alg;
+  cm->opt_data[id].active_engine_index_simple = ~0;
+  cm->opt_data[id].active_engine_index_chained = ~0;
+  cm->opt_data[id].type = VNET_CRYPTO_OP_TYPE_HASH;
+  hash_set_mem (cm->alg_index_by_name, name, alg);
+}
+
 static void
 vnet_crypto_init_hmac_data (vnet_crypto_alg_t alg,
                             vnet_crypto_op_id_t id, char *name)
@@ -392,16 +702,47 @@ vnet_crypto_init_hmac_data (vnet_crypto_alg_t alg,
   hash_set_mem (cm->alg_index_by_name, name, alg);
 }
 
+static void
+vnet_crypto_init_async_data (vnet_crypto_async_alg_t alg,
+                             vnet_crypto_async_op_id_t eid,
+                             vnet_crypto_async_op_id_t did, char *name)
+{
+  vnet_crypto_main_t *cm = &crypto_main;
+
+  cm->async_algs[alg].name = name;
+  cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT] = eid;
+  cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT] = did;
+  cm->async_opt_data[eid].type = VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT;
+  cm->async_opt_data[eid].alg = alg;
+  cm->async_opt_data[eid].active_engine_index_async = ~0;
+  cm->async_opt_data[eid].active_engine_index_async = ~0;
+  cm->async_opt_data[did].type = VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT;
+  cm->async_opt_data[did].alg = alg;
+  cm->async_opt_data[did].active_engine_index_async = ~0;
+  cm->async_opt_data[did].active_engine_index_async = ~0;
+  hash_set_mem (cm->async_alg_index_by_name, name, alg);
+}
+
 clib_error_t *
 vnet_crypto_init (vlib_main_t * vm)
 {
   vnet_crypto_main_t *cm = &crypto_main;
   vlib_thread_main_t *tm = vlib_get_thread_main ();
+  vnet_crypto_thread_t *ct = 0;
+
+  cm->dispatch_mode = VNET_CRYPTO_ASYNC_DISPATCH_POLLING;
   cm->engine_index_by_name = hash_create_string ( /* size */ 0,
                                                   sizeof (uword));
   cm->alg_index_by_name = hash_create_string (0, sizeof (uword));
+  cm->async_alg_index_by_name = hash_create_string (0, sizeof (uword));
   vec_validate_aligned (cm->threads, tm->n_vlib_mains, CLIB_CACHE_LINE_BYTES);
+  vec_foreach (ct, cm->threads)
+    pool_alloc_aligned (ct->frame_pool, VNET_CRYPTO_FRAME_POOL_SIZE,
+                        CLIB_CACHE_LINE_BYTES);
   vec_validate (cm->algs, VNET_CRYPTO_N_ALGS);
+  vec_validate (cm->async_algs, VNET_CRYPTO_N_ASYNC_ALGS);
+  clib_bitmap_validate (cm->async_active_ids, VNET_CRYPTO_ASYNC_OP_N_IDS);
+
 #define _(n, s, l) \
   vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
                                 VNET_CRYPTO_OP_##n##_ENC, \
@@ -419,6 +760,28 @@ vnet_crypto_init (vlib_main_t * vm)
                           VNET_CRYPTO_OP_##n##_HMAC, "hmac-" s);
   foreach_crypto_hmac_alg;
 #undef _
+#define _(n, s) \
+  vnet_crypto_init_hash_data (VNET_CRYPTO_ALG_HASH_##n, \
+                              VNET_CRYPTO_OP_##n##_HASH, s);
+  foreach_crypto_hash_alg;
+#undef _
+#define _(n, s, k, t, a) \
+  vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a, \
+                               VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
+                               VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC, \
+                               s);
+  foreach_crypto_aead_async_alg
+#undef _
+#define _(c, h, s, k, d) \
+  vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##c##_##h##_TAG##d, \
+                               VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
+                               VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC, \
+                               s);
+  foreach_crypto_link_async_alg
+#undef _
+
+  cm->crypto_node_index =
+    vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch")->index;
 
   return 0;
 }
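
For illustration, a minimal sketch (not part of this commit) of how an engine
might hook into the registration API added above. The engine name, priority,
handler bodies and the exact dequeue callback signature are assumptions; in
particular, the signature of vnet_crypto_frame_dequeue_t has varied across
VPP revisions, so check the crypto.h that matches this blob.

#include <vlib/vlib.h>
#include <vnet/crypto/crypto.h>

static int
example_enqueue (vlib_main_t * vm, vnet_crypto_async_frame_t * frame)
{
  /* hand the frame to hardware or a worker ring; return 0 on success */
  return 0;
}

static vnet_crypto_async_frame_t *
example_dequeue (vlib_main_t * vm)
{
  /* return a completed frame, or 0 when nothing is ready */
  return 0;
}

static clib_error_t *
example_engine_init (vlib_main_t * vm)
{
  /* priority 100 is arbitrary; see the <= comparison in
     vnet_crypto_register_async_handler () above: among engines
     registering the same op id, the higher-priority engine (and, on
     ties, the later registration) becomes the active one */
  u32 eidx = vnet_crypto_register_engine (vm, "example", 100,
                                          "example async engine");

  /* op id generated by the foreach_crypto_aead_async_alg block above */
  vnet_crypto_register_async_handler (
    vm, eidx, VNET_CRYPTO_OP_AES_128_GCM_TAG16_AAD12_ENC,
    example_enqueue, example_dequeue);
  return 0;
}

VLIB_INIT_FUNCTION (example_engine_init);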
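
And a sketch of the consumer side, exercising the linked-key, post-node and
async-mode APIs introduced by this change. Key material, key lengths and the
post node name are placeholders; only the vnet_crypto_* calls come from the
diff above.

#include <vlib/vlib.h>
#include <vnet/crypto/crypto.h>

static u32
example_session_setup (vlib_main_t * vm, u8 * aes_key /* 16 bytes */ ,
                       u8 * sha1_key /* 20 bytes */ )
{
  u32 ki_crypto, ki_integ, ki_linked;

  ki_crypto = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_AES_128_CBC,
                                   aes_key, 16);
  ki_integ = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_HMAC_SHA1,
                                  sha1_key, 20);

  /* resolves to VNET_CRYPTO_ALG_AES_128_CBC_SHA1_TAG12 via
     vnet_crypto_link_algs (); returns ~0 if the pair is not linkable */
  ki_linked = vnet_crypto_key_add_linked (vm, ki_crypto, ki_integ);

  /* next index used when crypto-dispatch hands completed frames to us;
     "example-post-node" is a hypothetical graph node */
  (void) vnet_crypto_register_post_node (vm, "example-post-node");

  /* refcounted enable of the crypto-dispatch node */
  vnet_crypto_request_async_mode (1 /* is_enable */ );

  return ki_linked;
}

Note that vnet_crypto_request_async_mode () is reference-counted, so several
features can enable and disable async operation independently, and the
crypto-dispatch node starts out in polling mode per the default set in
vnet_crypto_init () above; vnet_crypto_set_async_dispatch_mode () can switch
it to interrupt mode.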