/*
 * Copyright (c) 2019 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef included_vnet_crypto_crypto_h
#define included_vnet_crypto_crypto_h

#include <vlib/vlib.h>

#define VNET_CRYPTO_FRAME_SIZE 32
/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_cipher_alg \
  _(DES_CBC, "des-cbc", 7) \
  _(3DES_CBC, "3des-cbc", 24) \
  _(AES_128_CBC, "aes-128-cbc", 16) \
  _(AES_192_CBC, "aes-192-cbc", 24) \
  _(AES_256_CBC, "aes-256-cbc", 32) \
  _(AES_128_CTR, "aes-128-ctr", 16) \
  _(AES_192_CTR, "aes-192-ctr", 24) \
  _(AES_256_CTR, "aes-256-ctr", 32)
/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_aead_alg \
  _(AES_128_GCM, "aes-128-gcm", 16) \
  _(AES_192_GCM, "aes-192-gcm", 24) \
  _(AES_256_GCM, "aes-256-gcm", 32)
#define foreach_crypto_hmac_alg \
  _(MD5, "md5") \
  _(SHA1, "sha-1") \
  _(SHA224, "sha-224") \
  _(SHA256, "sha-256") \
  _(SHA384, "sha-384") \
  _(SHA512, "sha-512")
#define foreach_crypto_op_type \
  _(ENCRYPT, "encrypt") \
  _(DECRYPT, "decrypt") \
  _(AEAD_ENCRYPT, "aead-encrypt") \
  _(AEAD_DECRYPT, "aead-decrypt") \
  _(HMAC, "hmac")
typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_TYPE_##n,
  foreach_crypto_op_type
#undef _
    VNET_CRYPTO_OP_N_TYPES,
} vnet_crypto_op_type_t;
#define foreach_crypto_op_status \
  _(IDLE, "idle") \
  _(PENDING, "pending") \
  _(WORK_IN_PROGRESS, "work-in-progress") \
  _(COMPLETED, "completed") \
  _(FAIL_NO_HANDLER, "no-handler") \
  _(FAIL_BAD_HMAC, "bad-hmac") \
  _(FAIL_ENGINE_ERR, "engine-error")
/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, TAG_LEN, AAD_LEN */
#define foreach_crypto_aead_async_alg \
  _(AES_128_GCM, "aes-128-gcm-aad8", 16, 16, 8) \
  _(AES_128_GCM, "aes-128-gcm-aad12", 16, 16, 12) \
  _(AES_192_GCM, "aes-192-gcm-aad8", 24, 16, 8) \
  _(AES_192_GCM, "aes-192-gcm-aad12", 24, 16, 12) \
  _(AES_256_GCM, "aes-256-gcm-aad8", 32, 16, 8) \
  _(AES_256_GCM, "aes-256-gcm-aad12", 32, 16, 12)
/* CRYPTO_ID, INTEG_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, DIGEST_LEN */
#define foreach_crypto_link_async_alg \
  _ (AES_128_CBC, SHA1, "aes-128-cbc-hmac-sha-1", 16, 12) \
  _ (AES_192_CBC, SHA1, "aes-192-cbc-hmac-sha-1", 24, 12) \
  _ (AES_256_CBC, SHA1, "aes-256-cbc-hmac-sha-1", 32, 12) \
  _ (AES_128_CBC, SHA224, "aes-128-cbc-hmac-sha-224", 16, 14) \
  _ (AES_192_CBC, SHA224, "aes-192-cbc-hmac-sha-224", 24, 14) \
  _ (AES_256_CBC, SHA224, "aes-256-cbc-hmac-sha-224", 32, 14) \
  _ (AES_128_CBC, SHA256, "aes-128-cbc-hmac-sha-256", 16, 16) \
  _ (AES_192_CBC, SHA256, "aes-192-cbc-hmac-sha-256", 24, 16) \
  _ (AES_256_CBC, SHA256, "aes-256-cbc-hmac-sha-256", 32, 16) \
  _ (AES_128_CBC, SHA384, "aes-128-cbc-hmac-sha-384", 16, 24) \
  _ (AES_192_CBC, SHA384, "aes-192-cbc-hmac-sha-384", 24, 24) \
  _ (AES_256_CBC, SHA384, "aes-256-cbc-hmac-sha-384", 32, 24) \
  _ (AES_128_CBC, SHA512, "aes-128-cbc-hmac-sha-512", 16, 32) \
  _ (AES_192_CBC, SHA512, "aes-192-cbc-hmac-sha-512", 24, 32) \
  _ (AES_256_CBC, SHA512, "aes-256-cbc-hmac-sha-512", 32, 32)
#define foreach_crypto_async_op_type \
  _(ENCRYPT, "async-encrypt") \
  _(DECRYPT, "async-decrypt")
typedef enum
{
  VNET_CRYPTO_KEY_OP_ADD,
  VNET_CRYPTO_KEY_OP_DEL,
  VNET_CRYPTO_KEY_OP_MODIFY,
} vnet_crypto_key_op_t;
typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_STATUS_##n,
  foreach_crypto_op_status
#undef _
    VNET_CRYPTO_OP_N_STATUS,
} vnet_crypto_op_status_t;
typedef enum
{
  VNET_CRYPTO_ALG_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_ALG_##n,
  foreach_crypto_cipher_alg
  foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_ALG_HMAC_##n,
  foreach_crypto_hmac_alg
#undef _
  VNET_CRYPTO_N_ALGS,
} vnet_crypto_alg_t;
typedef enum
{
#define _(n, s) VNET_CRYPTO_ASYNC_OP_TYPE_##n,
  foreach_crypto_async_op_type
#undef _
    VNET_CRYPTO_ASYNC_OP_N_TYPES,
} vnet_crypto_async_op_type_t;
typedef enum
{
  VNET_CRYPTO_ASYNC_ALG_NONE = 0,
#define _(n, s, k, t, a) \
  VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a,
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d) \
  VNET_CRYPTO_ALG_##c##_##h##_TAG##d,
  foreach_crypto_link_async_alg
#undef _
  VNET_CRYPTO_N_ASYNC_ALGS,
} vnet_crypto_async_alg_t;
typedef enum
{
  VNET_CRYPTO_ASYNC_OP_NONE = 0,
#define _(n, s, k, t, a) \
  VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
  VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d) \
  VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
  VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,
  foreach_crypto_link_async_alg
#undef _
  VNET_CRYPTO_ASYNC_OP_N_IDS,
} vnet_crypto_async_op_id_t;
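/*
 * Example of the generated identifiers: the macros above expand to names
 * such as VNET_CRYPTO_OP_AES_128_GCM_TAG16_AAD12_ENC (AEAD) and
 * VNET_CRYPTO_OP_AES_128_CBC_SHA1_TAG12_DEC (linked cipher + integrity).
 */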
typedef struct
{
  union
  {
    struct
    {
      u8 *data;
      vnet_crypto_alg_t alg:8;
    };
    struct
    {
      u32 index_crypto;
      u32 index_integ;
      vnet_crypto_async_alg_t async_alg:8;
    };
  };
#define VNET_CRYPTO_KEY_TYPE_DATA 0
#define VNET_CRYPTO_KEY_TYPE_LINK 1
  u8 type;
} vnet_crypto_key_t;
typedef enum
{
  VNET_CRYPTO_OP_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_OP_##n##_ENC, VNET_CRYPTO_OP_##n##_DEC,
  foreach_crypto_cipher_alg
  foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_OP_##n##_HMAC,
  foreach_crypto_hmac_alg
#undef _
  VNET_CRYPTO_N_OP_IDS,
} vnet_crypto_op_id_t;
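/*
 * Example of the generated identifiers: VNET_CRYPTO_OP_AES_128_CBC_ENC,
 * VNET_CRYPTO_OP_AES_128_CBC_DEC, VNET_CRYPTO_OP_SHA1_HMAC, ...
 */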
typedef enum
{
  CRYPTO_OP_SIMPLE,
  CRYPTO_OP_CHAINED,
  CRYPTO_OP_BOTH,
} crypto_op_class_type_t;
typedef struct
{
  char *name;
  vnet_crypto_op_id_t op_by_type[VNET_CRYPTO_OP_N_TYPES];
} vnet_crypto_alg_data_t;
typedef struct
{
  u8 *src;
  u8 *dst;
  u32 len;
} vnet_crypto_op_chunk_t;
typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  uword user_data;
  vnet_crypto_op_id_t op:16;
  vnet_crypto_op_status_t status:8;
  u8 flags;
#define VNET_CRYPTO_OP_FLAG_INIT_IV (1 << 0)
#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK (1 << 1)
#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS (1 << 2)

  union
  {
    u8 digest_len;
    u8 tag_len;
  };
  u8 aad_len;

  /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
  u16 n_chunks;

  union
  {
    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is not set */
    u32 len;
    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
    u32 chunk_index;
  };

  u32 key_index;
  u8 *iv;
  u8 *src;
  u8 *dst;
  u8 *aad;

  union
  {
    u8 *digest;
    u8 *tag;
  };
} vnet_crypto_op_t;
STATIC_ASSERT_SIZEOF (vnet_crypto_op_t, CLIB_CACHE_LINE_BYTES);
typedef struct
{
  vnet_crypto_op_type_t type;
  vnet_crypto_alg_t alg;
  u32 active_engine_index_simple;
  u32 active_engine_index_chained;
} vnet_crypto_op_data_t;
typedef struct
{
  vnet_crypto_async_op_type_t type;
  vnet_crypto_async_alg_t alg;
  u32 active_engine_index_async;
} vnet_crypto_async_op_data_t;
typedef struct
{
  char *name;
  vnet_crypto_async_op_id_t op_by_type[VNET_CRYPTO_ASYNC_OP_N_TYPES];
} vnet_crypto_async_alg_data_t;
typedef struct
{
  vnet_crypto_op_status_t status:8;
  u32 key_index;
  i16 crypto_start_offset;	/* first buffer offset */
  i16 integ_start_offset;
  u32 crypto_total_length;
  /* adjusted total length for integ, e.g. 4 bytes for IPsec ESN */
  u16 integ_length_adj;
  u8 *iv;
  u8 *tag;
  u8 *aad;
  u8 flags; /**< share same VNET_CRYPTO_OP_FLAG_* values */
} vnet_crypto_async_frame_elt_t;
typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
#define VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED 0
#define VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS 1
#define VNET_CRYPTO_FRAME_STATE_SUCCESS 2
#define VNET_CRYPTO_FRAME_STATE_ELT_ERROR 3
  u8 state;
  vnet_crypto_async_op_id_t op:8;
  u16 n_elts;
  vnet_crypto_async_frame_elt_t elts[VNET_CRYPTO_FRAME_SIZE];
  u32 buffer_indices[VNET_CRYPTO_FRAME_SIZE];
  u16 next_node_index[VNET_CRYPTO_FRAME_SIZE];
} vnet_crypto_async_frame_t;
typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  vnet_crypto_async_frame_t *frames[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_async_frame_t *frame_pool;
} vnet_crypto_thread_t;
typedef u32 vnet_crypto_key_index_t;
typedef u32 (vnet_crypto_chained_ops_handler_t) (vlib_main_t * vm,
						 vnet_crypto_op_t * ops[],
						 vnet_crypto_op_chunk_t *
						 chunks, u32 n_ops);

typedef u32 (vnet_crypto_ops_handler_t) (vlib_main_t * vm,
					 vnet_crypto_op_t * ops[], u32 n_ops);
typedef void (vnet_crypto_key_handler_t) (vlib_main_t * vm,
					  vnet_crypto_key_op_t kop,
					  vnet_crypto_key_index_t idx);

/** async crypto function handlers **/
typedef int (vnet_crypto_frame_enqueue_t) (vlib_main_t * vm,
					   vnet_crypto_async_frame_t * frame);
typedef vnet_crypto_async_frame_t *
  (vnet_crypto_frame_dequeue_t) (vlib_main_t * vm);
u32 vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
				 char *desc);
void vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
				       vnet_crypto_op_id_t opt,
				       vnet_crypto_ops_handler_t * oph);
void vnet_crypto_register_chained_ops_handler (vlib_main_t * vm,
					       u32 engine_index,
					       vnet_crypto_op_id_t opt,
					       vnet_crypto_chained_ops_handler_t
					       * oph);
void vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
					vnet_crypto_op_id_t opt,
					vnet_crypto_ops_handler_t * fn,
					vnet_crypto_chained_ops_handler_t *
					cfn);
void vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
				       vnet_crypto_key_handler_t * keyh);
/** async crypto register functions */
u32 vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name);
void vnet_crypto_register_async_handler (vlib_main_t * vm,
					 u32 engine_index,
					 vnet_crypto_async_op_id_t opt,
					 vnet_crypto_frame_enqueue_t * enq_fn,
					 vnet_crypto_frame_dequeue_t *
					 deq_fn);
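/*
 * Typical registration flow, shown as an illustrative sketch only: the
 * engine name and handler functions below (my_engine_*) are hypothetical
 * placeholders, not part of this API.
 *
 *   u32 eidx = vnet_crypto_register_engine (vm, "my-engine", 100,
 *					     "example engine");
 *   vnet_crypto_register_ops_handler (vm, eidx,
 *				       VNET_CRYPTO_OP_AES_128_CBC_ENC,
 *				       my_engine_aes_cbc_enc);
 *   vnet_crypto_register_key_handler (vm, eidx, my_engine_key_handler);
 */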
typedef struct
{
  char *name;
  char *desc;
  int priority;
  vnet_crypto_key_handler_t *key_op_handler;
  vnet_crypto_ops_handler_t *ops_handlers[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_chained_ops_handler_t
    * chained_ops_handlers[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_frame_enqueue_t *enqueue_handlers[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_frame_dequeue_t *dequeue_handlers[VNET_CRYPTO_ASYNC_OP_N_IDS];
} vnet_crypto_engine_t;
typedef struct
{
  u32 node_idx;
  u32 next_idx;
} vnet_crypto_async_next_node_t;
typedef struct
{
  vnet_crypto_alg_data_t *algs;
  vnet_crypto_thread_t *threads;
  vnet_crypto_ops_handler_t **ops_handlers;
  vnet_crypto_chained_ops_handler_t **chained_ops_handlers;
  vnet_crypto_frame_enqueue_t **enqueue_handlers;
  vnet_crypto_frame_dequeue_t **dequeue_handlers;
  clib_bitmap_t *async_active_ids;
  vnet_crypto_op_data_t opt_data[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_async_op_data_t async_opt_data[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_engine_t *engines;
  vnet_crypto_key_t *keys;
  uword *engine_index_by_name;
  uword *alg_index_by_name;
  uword *async_alg_index_by_name;
  vnet_crypto_async_alg_data_t *async_algs;
  vnet_crypto_async_next_node_t *next_nodes;
} vnet_crypto_main_t;
extern vnet_crypto_main_t crypto_main;
u32 vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
				     vnet_crypto_op_chunk_t * chunks,
				     u32 n_ops);
u32 vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
			     u32 n_ops);
int vnet_crypto_set_handler2 (char *ops_handler_name, char *engine,
			      crypto_op_class_type_t oct);
int vnet_crypto_is_set_handler (vnet_crypto_alg_t alg);
u32 vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg,
			 u8 * data, u16 length);
void vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index);
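/*
 * Illustrative sketch of the synchronous path (key_data, iv, src, dst and
 * len are caller-provided placeholders, not part of this API):
 *
 *   u32 ki = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_AES_128_CBC,
 *				   key_data, 16);
 *   vnet_crypto_op_t op;
 *   vnet_crypto_op_init (&op, VNET_CRYPTO_OP_AES_128_CBC_ENC);
 *   op.key_index = ki;
 *   op.iv = iv;
 *   op.src = src;
 *   op.dst = dst;
 *   op.len = len;
 *   vnet_crypto_process_ops (vm, &op, 1);
 *   if (op.status != VNET_CRYPTO_OP_STATUS_COMPLETED)
 *     (handle the error);
 */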
/**
 * Use 2 created keys to generate new key for linked algs (cipher + integ)
 * The returned key index is to be used for linked alg only.
 **/
u32 vnet_crypto_key_add_linked (vlib_main_t * vm,
				vnet_crypto_key_index_t index_crypto,
				vnet_crypto_key_index_t index_integ);
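/*
 * Illustrative sketch (the key material ck/ik is hypothetical): create the
 * two plain keys first, then link them; the linked index is what the async
 * cipher + integrity ops consume.
 *
 *   u32 kc = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_AES_128_CBC, ck, 16);
 *   u32 kh = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_HMAC_SHA1, ik, 20);
 *   u32 kl = vnet_crypto_key_add_linked (vm, kc, kh);
 */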
clib_error_t *crypto_dispatch_enable_disable (int is_enable);
int vnet_crypto_set_async_handler2 (char *alg_name, char *engine);

int vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t opt);

void vnet_crypto_request_async_mode (int is_enable);

vnet_crypto_async_alg_t vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
					       vnet_crypto_alg_t integ_alg);
format_function_t format_vnet_crypto_alg;
format_function_t format_vnet_crypto_engine;
format_function_t format_vnet_crypto_op;
format_function_t format_vnet_crypto_op_type;
format_function_t format_vnet_crypto_op_status;
unformat_function_t unformat_vnet_crypto_alg;

format_function_t format_vnet_crypto_async_op;
format_function_t format_vnet_crypto_async_alg;
format_function_t format_vnet_crypto_async_op_type;
static_always_inline void
vnet_crypto_op_init (vnet_crypto_op_t * op, vnet_crypto_op_id_t type)
{
  if (CLIB_DEBUG > 0)
    clib_memset (op, 0xfe, sizeof (*op));
  op->op = type;
  op->flags = 0;
  op->key_index = ~0;
  op->n_chunks = 0;
}
static_always_inline vnet_crypto_op_type_t
vnet_crypto_get_op_type (vnet_crypto_op_id_t id)
{
  vnet_crypto_main_t *cm = &crypto_main;
  ASSERT (id < VNET_CRYPTO_N_OP_IDS);
  vnet_crypto_op_data_t *od = cm->opt_data + id;
  return od->type;
}
static_always_inline vnet_crypto_key_t *
vnet_crypto_get_key (vnet_crypto_key_index_t index)
{
  vnet_crypto_main_t *cm = &crypto_main;
  return vec_elt_at_index (cm->keys, index);
}
static_always_inline int
vnet_crypto_set_handler (char *alg_name, char *engine)
{
  return vnet_crypto_set_handler2 (alg_name, engine, CRYPTO_OP_BOTH);
}
/** async crypto inline functions **/
static_always_inline vnet_crypto_async_frame_t *
vnet_crypto_async_get_frame (vlib_main_t * vm, vnet_crypto_async_op_id_t opt)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
  vnet_crypto_async_frame_t *f = ct->frames[opt];

  if (!f)
    {
      pool_get_aligned (ct->frame_pool, f, CLIB_CACHE_LINE_BYTES);
      if (CLIB_DEBUG > 0)
	clib_memset (f, 0xfe, sizeof (*f));
      f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
      f->op = opt;
      f->n_elts = 0;
      ct->frames[opt] = f;
    }
  return f;
}
static_always_inline void
vnet_crypto_async_free_frame (vlib_main_t * vm,
			      vnet_crypto_async_frame_t * frame)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
  pool_put (ct->frame_pool, frame);
}
static_always_inline int
vnet_crypto_async_submit_open_frame (vlib_main_t * vm,
				     vnet_crypto_async_frame_t * frame)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
  vnet_crypto_async_op_id_t opt = frame->op;
  int ret = (cm->enqueue_handlers[frame->op]) (vm, frame);
  clib_bitmap_set_no_check (cm->async_active_ids, opt, 1);
  if (PREDICT_TRUE (ret == 0))
    {
      vnet_crypto_async_frame_t *nf = 0;
      frame->state = VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS;
      pool_get_aligned (ct->frame_pool, nf, CLIB_CACHE_LINE_BYTES);
      if (CLIB_DEBUG > 0)
	clib_memset (nf, 0xfe, sizeof (*nf));
      nf->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
      nf->op = opt;
      nf->n_elts = 0;
      ct->frames[opt] = nf;
    }
  return ret;
}
static_always_inline int
vnet_crypto_async_add_to_frame (vlib_main_t * vm,
				vnet_crypto_async_frame_t ** frame,
				u32 key_index,
				u32 crypto_len, i16 integ_len_adj,
				i16 crypto_start_offset,
				u16 integ_start_offset,
				u32 buffer_index,
				u16 next_node,
				u8 * iv, u8 * tag, u8 * aad, u8 flags)
{
  vnet_crypto_async_frame_t *f = *frame;
  vnet_crypto_async_frame_elt_t *fe;
  u16 index;

  if (PREDICT_FALSE (f->n_elts == VNET_CRYPTO_FRAME_SIZE))
    {
      vnet_crypto_async_op_id_t opt = f->op;
      int ret;
      ret = vnet_crypto_async_submit_open_frame (vm, f);
      if (PREDICT_FALSE (ret < 0))
	return -1;
      f = vnet_crypto_async_get_frame (vm, opt);
      *frame = f;
    }

  index = f->n_elts;
  fe = &f->elts[index];
  f->n_elts++;
  fe->key_index = key_index;
  fe->crypto_total_length = crypto_len;
  fe->crypto_start_offset = crypto_start_offset;
  fe->integ_start_offset = integ_start_offset;
  fe->integ_length_adj = integ_len_adj;
  fe->iv = iv;
  fe->tag = tag;
  fe->aad = aad;
  fe->flags = flags;
  f->buffer_indices[index] = buffer_index;
  f->next_node_index[index] = next_node;

  return 0;
}
static_always_inline void
vnet_crypto_async_reset_frame (vnet_crypto_async_frame_t * f)
{
  vnet_crypto_async_op_id_t opt;
  ASSERT (f->state == VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED);
  opt = f->op;
  if (CLIB_DEBUG > 0)
    clib_memset (f, 0xfe, sizeof (*f));
  f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
  f->op = opt;
  f->n_elts = 0;
}
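/*
 * Illustrative sketch of the async path (key_index, offsets, buffer index bi
 * and the iv/tag/aad pointers are hypothetical placeholders):
 *
 *   vnet_crypto_async_frame_t *f =
 *     vnet_crypto_async_get_frame (vm,
 *				    VNET_CRYPTO_OP_AES_128_GCM_TAG16_AAD12_ENC);
 *   vnet_crypto_async_add_to_frame (vm, &f, key_index, crypto_len,
 *				     integ_len_adj, crypto_start_offset,
 *				     integ_start_offset, bi, next_index,
 *				     iv, tag, aad, flags);
 *   vnet_crypto_async_submit_open_frame (vm, f);
 *
 * Completed frames are returned by the active engine's dequeue handler and
 * their elements are dispatched to the recorded next_node_index[].
 */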
#endif /* included_vnet_crypto_crypto_h */
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */