/*
 * Copyright (c) 2018 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <vlib/vlib.h>
#include <vnet/crypto/crypto.h>

vnet_crypto_main_t crypto_main;
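
/* Mark every op in the batch with the given status, e.g. when no engine
 * has registered a handler for the requested operation. */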
static_always_inline void
crypto_set_op_status (vnet_crypto_op_t * ops[], u32 n_ops, int status)
{
  while (n_ops--)
    {
      ops[0]->status = status;
      ops++;
    }
}
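
/* Dispatch a batch of ops sharing a single op id to the active handler,
 * choosing the chained variant when a chunk array is supplied. */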
static_always_inline u32
vnet_crypto_process_ops_call_handler (vlib_main_t * vm,
				      vnet_crypto_main_t * cm,
				      vnet_crypto_op_id_t opt,
				      vnet_crypto_op_t * ops[],
				      vnet_crypto_op_chunk_t * chunks,
				      u32 n_ops)
{
  u32 rv = 0;

  if (n_ops == 0)
    return 0;

  if (chunks)
    {
      if (cm->chained_ops_handlers[opt] == 0)
	crypto_set_op_status (ops, n_ops,
			      VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER);
      else
	rv = (cm->chained_ops_handlers[opt]) (vm, ops, chunks, n_ops);
    }
  else
    {
      if (cm->ops_handlers[opt] == 0)
	crypto_set_op_status (ops, n_ops,
			      VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER);
      else
	rv = (cm->ops_handlers[opt]) (vm, ops, n_ops);
    }

  return rv;
}
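
/* Walk the op array and batch consecutive ops of the same op id, so each
 * handler is called once per homogeneous run (bounded by VLIB_FRAME_SIZE). */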
static_always_inline u32
vnet_crypto_process_ops_inline (vlib_main_t * vm, vnet_crypto_op_t ops[],
				vnet_crypto_op_chunk_t * chunks, u32 n_ops)
{
  vnet_crypto_main_t *cm = &crypto_main;
  const int op_q_size = VLIB_FRAME_SIZE;
  vnet_crypto_op_t *op_queue[op_q_size];
  vnet_crypto_op_id_t opt, current_op_type = ~0;
  u32 n_op_queue = 0;
  u32 rv = 0, i;

  ASSERT (n_ops >= 1);

  for (i = 0; i < n_ops; i++)
    {
      opt = ops[i].op;

      if (current_op_type != opt || n_op_queue >= op_q_size)
	{
	  rv += vnet_crypto_process_ops_call_handler (vm, cm, current_op_type,
						      op_queue, chunks,
						      n_op_queue);
	  n_op_queue = 0;
	  current_op_type = opt;
	}

      op_queue[n_op_queue++] = &ops[i];
    }

  rv += vnet_crypto_process_ops_call_handler (vm, cm, current_op_type,
					      op_queue, chunks, n_op_queue);
  return rv;
}

u32
vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[], u32 n_ops)
{
  return vnet_crypto_process_ops_inline (vm, ops, 0, n_ops);
}

u32
vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
				 vnet_crypto_op_chunk_t * chunks, u32 n_ops)
{
  return vnet_crypto_process_ops_inline (vm, ops, chunks, n_ops);
}
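
/* Register a crypto engine and return its index; higher priority wins
 * when several engines implement the same op. */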
u32
vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
			     char *desc)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *p;

  vec_add2 (cm->engines, p, 1);
  p->name = name;
  p->desc = desc;
  p->priority = prio;

  hash_set_mem (cm->engine_index_by_name, p->name, p - cm->engines);

  return p - cm->engines;
}

static_always_inline void
crypto_set_active_engine (vnet_crypto_op_data_t * od,
			  vnet_crypto_op_id_t id, u32 ei,
			  crypto_op_class_type_t oct)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);

  if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_CHAINED)
    {
      if (ce->chained_ops_handlers[id])
	{
	  od->active_engine_index_chained = ei;
	  cm->chained_ops_handlers[id] = ce->chained_ops_handlers[id];
	}
    }

  if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_SIMPLE)
    {
      if (ce->ops_handlers[id])
	{
	  od->active_engine_index_simple = ei;
	  cm->ops_handlers[id] = ce->ops_handlers[id];
	}
    }
}

int
vnet_crypto_set_handler2 (char *alg_name, char *engine,
			  crypto_op_class_type_t oct)
{
  uword *p;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_alg_data_t *ad;
  int i;

  p = hash_get_mem (cm->alg_index_by_name, alg_name);
  if (!p)
    return -1;

  ad = vec_elt_at_index (cm->algs, p[0]);

  p = hash_get_mem (cm->engine_index_by_name, engine);
  if (!p)
    return -1;

  for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++)
    {
      vnet_crypto_op_data_t *od;
      vnet_crypto_op_id_t id = ad->op_by_type[i];

      if (id == 0)
	continue;

      od = cm->opt_data + id;
      crypto_set_active_engine (od, id, p[0], oct);
    }

  return 0;
}

int
vnet_crypto_is_set_handler (vnet_crypto_alg_t alg)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_op_id_t opt = 0;
  int i;

  if (alg > vec_len (cm->algs))
    return 0;

  for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++)
    if ((opt = cm->algs[alg].op_by_type[i]) != 0)
      break;

  return NULL != cm->ops_handlers[opt];
}
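
/* Record an engine's simple and/or chained handlers for an op id, and make
 * the engine active for that op if it is the first or highest-priority
 * registrant. */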
void
vnet_crypto_register_ops_handler_inline (vlib_main_t * vm, u32 engine_index,
					 vnet_crypto_op_id_t opt,
					 vnet_crypto_ops_handler_t * fn,
					 vnet_crypto_chained_ops_handler_t *
					 cfn)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
  vnet_crypto_op_data_t *otd = cm->opt_data + opt;
  vec_validate_aligned (cm->ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
			CLIB_CACHE_LINE_BYTES);
  vec_validate_aligned (cm->chained_ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
			CLIB_CACHE_LINE_BYTES);

  if (fn)
    {
      e->ops_handlers[opt] = fn;
      if (otd->active_engine_index_simple == ~0)
	{
	  otd->active_engine_index_simple = engine_index;
	  cm->ops_handlers[opt] = fn;
	}

      ae = vec_elt_at_index (cm->engines, otd->active_engine_index_simple);
      if (ae->priority < e->priority)
	crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_SIMPLE);
    }

  if (cfn)
    {
      e->chained_ops_handlers[opt] = cfn;
      if (otd->active_engine_index_chained == ~0)
	{
	  otd->active_engine_index_chained = engine_index;
	  cm->chained_ops_handlers[opt] = cfn;
	}

      ae = vec_elt_at_index (cm->engines, otd->active_engine_index_chained);
      if (ae->priority < e->priority)
	crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_CHAINED);
    }
}

void
vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
				  vnet_crypto_op_id_t opt,
				  vnet_crypto_ops_handler_t * fn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, 0);
}

void
vnet_crypto_register_chained_ops_handler (vlib_main_t * vm, u32 engine_index,
					  vnet_crypto_op_id_t opt,
					  vnet_crypto_chained_ops_handler_t *
					  fn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, 0, fn);
}

void
vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
				   vnet_crypto_op_id_t opt,
				   vnet_crypto_ops_handler_t * fn,
				   vnet_crypto_chained_ops_handler_t * cfn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, cfn);
}
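
/* Async path: record an engine's frame-enqueue handler for an async op id.
 * Note the <= priority comparison below, which lets a later engine of equal
 * priority take over the op. */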
void
vnet_crypto_register_enqueue_handler (vlib_main_t *vm, u32 engine_index,
				      vnet_crypto_async_op_id_t opt,
				      vnet_crypto_frame_enqueue_t *enqueue_hdl)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
  vnet_crypto_async_op_data_t *otd = cm->async_opt_data + opt;
  vec_validate_aligned (cm->enqueue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS,
			CLIB_CACHE_LINE_BYTES);

  if (!enqueue_hdl)
    return;

  e->enqueue_handlers[opt] = enqueue_hdl;
  if (otd->active_engine_index_async == ~0)
    {
      otd->active_engine_index_async = engine_index;
      cm->enqueue_handlers[opt] = enqueue_hdl;
    }

  ae = vec_elt_at_index (cm->engines, otd->active_engine_index_async);
  if (ae->priority <= e->priority)
    {
      otd->active_engine_index_async = engine_index;
      cm->enqueue_handlers[opt] = enqueue_hdl;
    }
}

static int
engine_index_cmp (void *v1, void *v2)
{
  u32 *a1 = v1;
  u32 *a2 = v2;

  if (a1[0] > a2[0])
    return 1;
  if (a1[0] < a2[0])
    return -1;
  return 0;
}
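
/* Rebuild the global list of dequeue handlers: collect the active engine
 * for every async op, sort and deduplicate the engine indices, then keep
 * one dequeue handler per engine. */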
static void
vnet_crypto_update_cm_dequeue_handlers (void)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_async_op_data_t *otd;
  vnet_crypto_engine_t *e;
  u32 *active_engines = 0, *ei, last_ei = ~0, i;

  vec_reset_length (cm->dequeue_handlers);

  for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_IDS; i++)
    {
      otd = cm->async_opt_data + i;
      if (otd->active_engine_index_async == ~0)
	continue;
      e = cm->engines + otd->active_engine_index_async;
      if (!e->dequeue_handler)
	continue;
      vec_add1 (active_engines, otd->active_engine_index_async);
    }

  vec_sort_with_function (active_engines, engine_index_cmp);

  vec_foreach (ei, active_engines)
    {
      if (ei[0] == last_ei)
	continue;

      e = cm->engines + ei[0];
      vec_add1 (cm->dequeue_handlers, e->dequeue_handler);
      last_ei = ei[0];
    }

  vec_free (active_engines);
}

void
vnet_crypto_register_dequeue_handler (vlib_main_t *vm, u32 engine_index,
				      vnet_crypto_frame_dequeue_t *deq_fn)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *e = vec_elt_at_index (cm->engines, engine_index);

  if (!deq_fn)
    return;

  e->dequeue_handler = deq_fn;

  vnet_crypto_update_cm_dequeue_handlers ();
}

void
vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
				  vnet_crypto_key_handler_t * key_handler)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *e = vec_elt_at_index (cm->engines, engine_index);
  e->key_op_handler = key_handler;
}

static int
vnet_crypto_key_len_check (vnet_crypto_alg_t alg, u16 length)
{
  switch (alg)
    {
    case VNET_CRYPTO_N_ALGS:
      return 0;
    case VNET_CRYPTO_ALG_NONE:
      return 1;

#define _(n, s, l) \
    case VNET_CRYPTO_ALG_##n: \
      return (l) == length;
      foreach_crypto_cipher_alg foreach_crypto_aead_alg
#undef _
      /* HMAC allows any key length */
#define _(n, s) \
    case VNET_CRYPTO_ALG_HMAC_##n: \
      return 1;
      foreach_crypto_hmac_alg
#undef _

#define _(n, s) \
    case VNET_CRYPTO_ALG_HASH_##n: \
      return 1;
      foreach_crypto_hash_alg
#undef _
    }

  return 0;
}
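
/* Add a key to the key pool and notify every engine that has a key-op
 * handler. Growing the pool may move it, so workers are barrier-synced
 * around the allocation. */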
u32
vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg, u8 * data,
		     u16 length)
{
  u32 index;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key;
  u8 need_barrier_sync = 0;

  if (!vnet_crypto_key_len_check (alg, length))
    return ~0;

  need_barrier_sync = pool_get_will_expand (cm->keys);
  /* If the cm->keys pool will expand, stop the parade. */
  if (need_barrier_sync)
    vlib_worker_thread_barrier_sync (vm);

  pool_get_zero (cm->keys, key);

  if (need_barrier_sync)
    vlib_worker_thread_barrier_release (vm);

  index = key - cm->keys;
  key->type = VNET_CRYPTO_KEY_TYPE_DATA;
  key->alg = alg;
  vec_validate_aligned (key->data, length - 1, CLIB_CACHE_LINE_BYTES);
  clib_memcpy (key->data, data, length);

  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);

  return index;
}

void
vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key = pool_elt_at_index (cm->keys, index);

  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_DEL, index);

  if (key->type == VNET_CRYPTO_KEY_TYPE_DATA)
    {
      clib_memset (key->data, 0xfe, vec_len (key->data));
      vec_free (key->data);
    }
  else if (key->type == VNET_CRYPTO_KEY_TYPE_LINK)
    {
      key->index_crypto = key->index_integ = ~0;
    }

  pool_put (cm->keys, key);
}
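
/* Map a (cipher, integrity) pair onto the combined async alg enumerated by
 * foreach_crypto_link_async_alg, or ~0 when no such combination exists. */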
vnet_crypto_async_alg_t
vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
		       vnet_crypto_alg_t integ_alg)
{
#define _(c, h, s, k, d) \
  if (crypto_alg == VNET_CRYPTO_ALG_##c && \
      integ_alg == VNET_CRYPTO_ALG_HMAC_##h) \
    return VNET_CRYPTO_ALG_##c##_##h##_TAG##d;
  foreach_crypto_link_async_alg
#undef _
    return ~0;
}
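
/* Create a linked key that pairs an existing crypto key with an existing
 * integrity key, for engines that service combined (async) ops. */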
u32
vnet_crypto_key_add_linked (vlib_main_t * vm,
			    vnet_crypto_key_index_t index_crypto,
			    vnet_crypto_key_index_t index_integ)
{
  u32 index;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key_crypto, *key_integ, *key;
  vnet_crypto_async_alg_t linked_alg;

  key_crypto = pool_elt_at_index (cm->keys, index_crypto);
  key_integ = pool_elt_at_index (cm->keys, index_integ);

  linked_alg = vnet_crypto_link_algs (key_crypto->alg, key_integ->alg);
  if (linked_alg == ~0)
    return ~0;

  pool_get_zero (cm->keys, key);
  index = key - cm->keys;
  key->type = VNET_CRYPTO_KEY_TYPE_LINK;
  key->index_crypto = index_crypto;
  key->index_integ = index_integ;
  key->async_alg = linked_alg;

  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);

  return index;
}

static_always_inline void
crypto_set_active_async_engine (vnet_crypto_async_op_data_t * od,
				vnet_crypto_async_op_id_t id, u32 ei)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);

  if (ce->enqueue_handlers[id] && ce->dequeue_handler)
    {
      od->active_engine_index_async = ei;
      cm->enqueue_handlers[id] = ce->enqueue_handlers[id];
    }
}
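
/* Select an engine by name for every async op of the named async alg,
 * then refresh the dequeue handler list to match. */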
int
vnet_crypto_set_async_handler2 (char *alg_name, char *engine)
{
  uword *p;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_async_alg_data_t *ad;
  int i;

  p = hash_get_mem (cm->async_alg_index_by_name, alg_name);
  if (!p)
    return -1;

  ad = vec_elt_at_index (cm->async_algs, p[0]);

  p = hash_get_mem (cm->engine_index_by_name, engine);
  if (!p)
    return -1;

  for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_TYPES; i++)
    {
      vnet_crypto_async_op_data_t *od;
      vnet_crypto_async_op_id_t id = ad->op_by_type[i];

      if (id == 0)
	continue;

      od = cm->async_opt_data + id;
      crypto_set_active_async_engine (od, id, p[0]);
    }

  vnet_crypto_update_cm_dequeue_handlers ();

  return 0;
}

u32
vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_async_next_node_t *nn = 0;
  vlib_node_t *cc, *pn;
  uword index = vec_len (cm->next_nodes);

  pn = vlib_get_node_by_name (vm, (u8 *) post_node_name);
  if (!pn)
    return ~0;

  vec_foreach (nn, cm->next_nodes)
    {
      if (nn->node_idx == pn->index)
	return nn->next_idx;
    }

  vec_validate (cm->next_nodes, index);
  nn = vec_elt_at_index (cm->next_nodes, index);

  cc = vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch");
  nn->next_idx = vlib_node_add_named_next (vm, cc->index, post_node_name);
  nn->node_idx = pn->index;

  return nn->next_idx;
}

int
vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t op)
{
  vnet_crypto_main_t *cm = &crypto_main;

  return (op < vec_len (cm->enqueue_handlers) &&
	  NULL != cm->enqueue_handlers[op]);
}
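
/* Init helpers: seed per-alg and per-op metadata (names, op types, and
 * "no active engine" sentinels) so engines can register against them. */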
static void
vnet_crypto_init_cipher_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t eid,
			      vnet_crypto_op_id_t did, char *name, u8 is_aead)
{
  vnet_crypto_op_type_t eopt, dopt;
  vnet_crypto_main_t *cm = &crypto_main;

  cm->algs[alg].name = name;
  cm->opt_data[eid].alg = cm->opt_data[did].alg = alg;
  cm->opt_data[eid].active_engine_index_simple = ~0;
  cm->opt_data[did].active_engine_index_simple = ~0;
  cm->opt_data[eid].active_engine_index_chained = ~0;
  cm->opt_data[did].active_engine_index_chained = ~0;

  if (is_aead)
    {
      eopt = VNET_CRYPTO_OP_TYPE_AEAD_ENCRYPT;
      dopt = VNET_CRYPTO_OP_TYPE_AEAD_DECRYPT;
    }
  else
    {
      eopt = VNET_CRYPTO_OP_TYPE_ENCRYPT;
      dopt = VNET_CRYPTO_OP_TYPE_DECRYPT;
    }

  cm->opt_data[eid].type = eopt;
  cm->opt_data[did].type = dopt;
  cm->algs[alg].op_by_type[eopt] = eid;
  cm->algs[alg].op_by_type[dopt] = did;
  hash_set_mem (cm->alg_index_by_name, name, alg);
}

static void
vnet_crypto_init_hash_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t id,
			    char *name)
{
  vnet_crypto_main_t *cm = &crypto_main;
  cm->algs[alg].name = name;
  cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_HASH] = id;
  cm->opt_data[id].alg = alg;
  cm->opt_data[id].active_engine_index_simple = ~0;
  cm->opt_data[id].active_engine_index_chained = ~0;
  cm->opt_data[id].type = VNET_CRYPTO_OP_TYPE_HASH;
  hash_set_mem (cm->alg_index_by_name, name, alg);
}

static void
vnet_crypto_init_hmac_data (vnet_crypto_alg_t alg,
			    vnet_crypto_op_id_t id, char *name)
{
  vnet_crypto_main_t *cm = &crypto_main;
  cm->algs[alg].name = name;
  cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_HMAC] = id;
  cm->opt_data[id].alg = alg;
  cm->opt_data[id].active_engine_index_simple = ~0;
  cm->opt_data[id].active_engine_index_chained = ~0;
  cm->opt_data[id].type = VNET_CRYPTO_OP_TYPE_HMAC;
  hash_set_mem (cm->alg_index_by_name, name, alg);
}

static void
vnet_crypto_init_async_data (vnet_crypto_async_alg_t alg,
			     vnet_crypto_async_op_id_t eid,
			     vnet_crypto_async_op_id_t did, char *name)
{
  vnet_crypto_main_t *cm = &crypto_main;

  cm->async_algs[alg].name = name;
  cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT] = eid;
  cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT] = did;
  cm->async_opt_data[eid].type = VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT;
  cm->async_opt_data[eid].alg = alg;
  cm->async_opt_data[eid].active_engine_index_async = ~0;
  cm->async_opt_data[did].type = VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT;
  cm->async_opt_data[did].alg = alg;
  cm->async_opt_data[did].active_engine_index_async = ~0;
  hash_set_mem (cm->async_alg_index_by_name, name, alg);
}
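
/* Plugin init: build the name->index hashes, size the per-thread frame
 * pools, and expand the per-alg metadata tables from the foreach_* macro
 * lists before any engine registers. */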
clib_error_t *
vnet_crypto_init (vlib_main_t * vm)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vnet_crypto_thread_t *ct = 0;

  cm->engine_index_by_name = hash_create_string ( /* size */ 0,
						 sizeof (uword));
  cm->alg_index_by_name = hash_create_string (0, sizeof (uword));
  cm->async_alg_index_by_name = hash_create_string (0, sizeof (uword));
  vec_validate_aligned (cm->threads, tm->n_vlib_mains, CLIB_CACHE_LINE_BYTES);
  vec_foreach (ct, cm->threads)
    pool_alloc_aligned (ct->frame_pool, VNET_CRYPTO_FRAME_POOL_SIZE,
			CLIB_CACHE_LINE_BYTES);
  vec_validate (cm->algs, VNET_CRYPTO_N_ALGS);
  vec_validate (cm->async_algs, VNET_CRYPTO_N_ASYNC_ALGS);

#define _(n, s, l) \
  vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
				VNET_CRYPTO_OP_##n##_ENC, \
				VNET_CRYPTO_OP_##n##_DEC, s, 0);
  foreach_crypto_cipher_alg;
#undef _
#define _(n, s, l) \
  vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
				VNET_CRYPTO_OP_##n##_ENC, \
				VNET_CRYPTO_OP_##n##_DEC, s, 1);
  foreach_crypto_aead_alg;
#undef _
#define _(n, s) \
  vnet_crypto_init_hmac_data (VNET_CRYPTO_ALG_HMAC_##n, \
			      VNET_CRYPTO_OP_##n##_HMAC, "hmac-" s);
  foreach_crypto_hmac_alg;
#undef _
#define _(n, s) \
  vnet_crypto_init_hash_data (VNET_CRYPTO_ALG_HASH_##n, \
			      VNET_CRYPTO_OP_##n##_HASH, s);
  foreach_crypto_hash_alg;
#undef _
#define _(n, s, k, t, a) \
  vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a, \
			       VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
			       VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC, \
			       s);
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d) \
  vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##c##_##h##_TAG##d, \
			       VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
			       VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC, \
			       s);
  foreach_crypto_link_async_alg
#undef _

  cm->crypto_node_index =
    vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch")->index;

  return 0;
}

VLIB_INIT_FUNCTION (vnet_crypto_init);

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */