/*
 * Copyright (c) 2018 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <vlib/vlib.h>
#include <vnet/crypto/crypto.h>
20 vnet_crypto_main_t crypto_main;
22 static_always_inline void
23 crypto_set_op_status (vnet_crypto_op_t * ops[], u32 n_ops, int status)
27 ops[0]->status = status;
32 static_always_inline u32
33 vnet_crypto_process_ops_call_handler (vlib_main_t * vm,
34 vnet_crypto_main_t * cm,
35 vnet_crypto_op_id_t opt,
36 vnet_crypto_op_t * ops[],
37 vnet_crypto_op_chunk_t * chunks,
47 if (cm->chained_ops_handlers[opt] == 0)
48 crypto_set_op_status (ops, n_ops,
49 VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER);
51 rv = (cm->chained_ops_handlers[opt]) (vm, ops, chunks, n_ops);
55 if (cm->ops_handlers[opt] == 0)
56 crypto_set_op_status (ops, n_ops,
57 VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER);
59 rv = (cm->ops_handlers[opt]) (vm, ops, n_ops);
65 static_always_inline u32
66 vnet_crypto_process_ops_inline (vlib_main_t * vm, vnet_crypto_op_t ops[],
67 vnet_crypto_op_chunk_t * chunks, u32 n_ops)
69 vnet_crypto_main_t *cm = &crypto_main;
70 const int op_q_size = VLIB_FRAME_SIZE;
71 vnet_crypto_op_t *op_queue[op_q_size];
72 vnet_crypto_op_id_t opt, current_op_type = ~0;
78 for (i = 0; i < n_ops; i++)
82 if (current_op_type != opt || n_op_queue >= op_q_size)
84 rv += vnet_crypto_process_ops_call_handler (vm, cm, current_op_type,
88 current_op_type = opt;
91 op_queue[n_op_queue++] = &ops[i];
94 rv += vnet_crypto_process_ops_call_handler (vm, cm, current_op_type,
95 op_queue, chunks, n_op_queue);
100 vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[], u32 n_ops)
102 return vnet_crypto_process_ops_inline (vm, ops, 0, n_ops);
106 vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
107 vnet_crypto_op_chunk_t * chunks, u32 n_ops)
109 return vnet_crypto_process_ops_inline (vm, ops, chunks, n_ops);
113 vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
116 vnet_crypto_main_t *cm = &crypto_main;
117 vnet_crypto_engine_t *p;
119 vec_add2 (cm->engines, p, 1);
124 hash_set_mem (cm->engine_index_by_name, p->name, p - cm->engines);
126 return p - cm->engines;
129 static_always_inline void
130 crypto_set_active_engine (vnet_crypto_op_data_t * od,
131 vnet_crypto_op_id_t id, u32 ei,
132 crypto_op_class_type_t oct)
134 vnet_crypto_main_t *cm = &crypto_main;
135 vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);
137 if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_CHAINED)
139 if (ce->chained_ops_handlers[id])
141 od->active_engine_index_chained = ei;
142 cm->chained_ops_handlers[id] = ce->chained_ops_handlers[id];
146 if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_SIMPLE)
148 if (ce->ops_handlers[id])
150 od->active_engine_index_simple = ei;
151 cm->ops_handlers[id] = ce->ops_handlers[id];
157 vnet_crypto_set_handler2 (char *alg_name, char *engine,
158 crypto_op_class_type_t oct)
161 vnet_crypto_main_t *cm = &crypto_main;
162 vnet_crypto_alg_data_t *ad;
165 p = hash_get_mem (cm->alg_index_by_name, alg_name);
169 ad = vec_elt_at_index (cm->algs, p[0]);
171 p = hash_get_mem (cm->engine_index_by_name, engine);
175 for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++)
177 vnet_crypto_op_data_t *od;
178 vnet_crypto_op_id_t id = ad->op_by_type[i];
182 od = cm->opt_data + id;
183 crypto_set_active_engine (od, id, p[0], oct);
190 vnet_crypto_is_set_handler (vnet_crypto_alg_t alg)
192 vnet_crypto_main_t *cm = &crypto_main;
194 return (alg < vec_len (cm->ops_handlers) && NULL != cm->ops_handlers[alg]);
198 vnet_crypto_register_ops_handler_inline (vlib_main_t * vm, u32 engine_index,
199 vnet_crypto_op_id_t opt,
200 vnet_crypto_ops_handler_t * fn,
201 vnet_crypto_chained_ops_handler_t *
204 vnet_crypto_main_t *cm = &crypto_main;
205 vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
206 vnet_crypto_op_data_t *otd = cm->opt_data + opt;
207 vec_validate_aligned (cm->ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
208 CLIB_CACHE_LINE_BYTES);
209 vec_validate_aligned (cm->chained_ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
210 CLIB_CACHE_LINE_BYTES);
214 e->ops_handlers[opt] = fn;
215 if (otd->active_engine_index_simple == ~0)
217 otd->active_engine_index_simple = engine_index;
218 cm->ops_handlers[opt] = fn;
221 ae = vec_elt_at_index (cm->engines, otd->active_engine_index_simple);
222 if (ae->priority < e->priority)
223 crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_SIMPLE);
228 e->chained_ops_handlers[opt] = cfn;
229 if (otd->active_engine_index_chained == ~0)
231 otd->active_engine_index_chained = engine_index;
232 cm->chained_ops_handlers[opt] = cfn;
235 ae = vec_elt_at_index (cm->engines, otd->active_engine_index_chained);
236 if (ae->priority < e->priority)
237 crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_CHAINED);
244 vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
245 vnet_crypto_op_id_t opt,
246 vnet_crypto_ops_handler_t * fn)
248 vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, 0);
252 vnet_crypto_register_chained_ops_handler (vlib_main_t * vm, u32 engine_index,
253 vnet_crypto_op_id_t opt,
254 vnet_crypto_chained_ops_handler_t *
257 vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, 0, fn);
261 vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
262 vnet_crypto_op_id_t opt,
263 vnet_crypto_ops_handler_t * fn,
264 vnet_crypto_chained_ops_handler_t * cfn)
266 vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, cfn);
270 vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
271 vnet_crypto_key_handler_t * key_handler)
273 vnet_crypto_main_t *cm = &crypto_main;
274 vnet_crypto_engine_t *e = vec_elt_at_index (cm->engines, engine_index);
275 e->key_op_handler = key_handler;
280 vnet_crypto_key_len_check (vnet_crypto_alg_t alg, u16 length)
284 case VNET_CRYPTO_N_ALGS:
286 case VNET_CRYPTO_ALG_NONE:
290 case VNET_CRYPTO_ALG_##n: \
294 foreach_crypto_cipher_alg foreach_crypto_aead_alg
296 /* HMAC allows any key length */
298 case VNET_CRYPTO_ALG_HMAC_##n: \
300 foreach_crypto_hmac_alg
308 vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg, u8 * data,
312 vnet_crypto_main_t *cm = &crypto_main;
313 vnet_crypto_engine_t *engine;
314 vnet_crypto_key_t *key;
316 if (!vnet_crypto_key_len_check (alg, length))
319 pool_get_zero (cm->keys, key);
320 index = key - cm->keys;
322 vec_validate_aligned (key->data, length - 1, CLIB_CACHE_LINE_BYTES);
323 clib_memcpy (key->data, data, length);
326 vec_foreach (engine, cm->engines)
327 if (engine->key_op_handler)
328 engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
334 vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index)
336 vnet_crypto_main_t *cm = &crypto_main;
337 vnet_crypto_engine_t *engine;
338 vnet_crypto_key_t *key = pool_elt_at_index (cm->keys, index);
341 vec_foreach (engine, cm->engines)
342 if (engine->key_op_handler)
343 engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_DEL, index);
346 clib_memset (key->data, 0, vec_len (key->data));
347 vec_free (key->data);
348 pool_put (cm->keys, key);
352 vnet_crypto_init_cipher_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t eid,
353 vnet_crypto_op_id_t did, char *name, u8 is_aead)
355 vnet_crypto_op_type_t eopt, dopt;
356 vnet_crypto_main_t *cm = &crypto_main;
358 cm->algs[alg].name = name;
359 cm->opt_data[eid].alg = cm->opt_data[did].alg = alg;
360 cm->opt_data[eid].active_engine_index_simple = ~0;
361 cm->opt_data[did].active_engine_index_simple = ~0;
362 cm->opt_data[eid].active_engine_index_chained = ~0;
363 cm->opt_data[did].active_engine_index_chained = ~0;
366 eopt = VNET_CRYPTO_OP_TYPE_AEAD_ENCRYPT;
367 dopt = VNET_CRYPTO_OP_TYPE_AEAD_DECRYPT;
371 eopt = VNET_CRYPTO_OP_TYPE_ENCRYPT;
372 dopt = VNET_CRYPTO_OP_TYPE_DECRYPT;
374 cm->opt_data[eid].type = eopt;
375 cm->opt_data[did].type = dopt;
376 cm->algs[alg].op_by_type[eopt] = eid;
377 cm->algs[alg].op_by_type[dopt] = did;
378 hash_set_mem (cm->alg_index_by_name, name, alg);
382 vnet_crypto_init_hmac_data (vnet_crypto_alg_t alg,
383 vnet_crypto_op_id_t id, char *name)
385 vnet_crypto_main_t *cm = &crypto_main;
386 cm->algs[alg].name = name;
387 cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_HMAC] = id;
388 cm->opt_data[id].alg = alg;
389 cm->opt_data[id].active_engine_index_simple = ~0;
390 cm->opt_data[id].active_engine_index_chained = ~0;
391 cm->opt_data[id].type = VNET_CRYPTO_OP_TYPE_HMAC;
392 hash_set_mem (cm->alg_index_by_name, name, alg);
396 vnet_crypto_init (vlib_main_t * vm)
398 vnet_crypto_main_t *cm = &crypto_main;
399 vlib_thread_main_t *tm = vlib_get_thread_main ();
400 cm->engine_index_by_name = hash_create_string ( /* size */ 0,
402 cm->alg_index_by_name = hash_create_string (0, sizeof (uword));
403 vec_validate_aligned (cm->threads, tm->n_vlib_mains, CLIB_CACHE_LINE_BYTES);
404 vec_validate (cm->algs, VNET_CRYPTO_N_ALGS);
406 vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
407 VNET_CRYPTO_OP_##n##_ENC, \
408 VNET_CRYPTO_OP_##n##_DEC, s, 0);
409 foreach_crypto_cipher_alg;
412 vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
413 VNET_CRYPTO_OP_##n##_ENC, \
414 VNET_CRYPTO_OP_##n##_DEC, s, 1);
415 foreach_crypto_aead_alg;
418 vnet_crypto_init_hmac_data (VNET_CRYPTO_ALG_HMAC_##n, \
419 VNET_CRYPTO_OP_##n##_HMAC, "hmac-" s);
420 foreach_crypto_hmac_alg;
425 VLIB_INIT_FUNCTION (vnet_crypto_init);
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */