src/vnet/crypto/crypto.h
/*
 * Copyright (c) 2019 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef included_vnet_crypto_crypto_h
#define included_vnet_crypto_crypto_h

#include <vlib/vlib.h>

#define VNET_CRYPTO_FRAME_SIZE 64
#define VNET_CRYPTO_FRAME_POOL_SIZE 1024

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_cipher_alg \
  _(DES_CBC,     "des-cbc", 7) \
  _(3DES_CBC,    "3des-cbc", 24) \
  _(AES_128_CBC, "aes-128-cbc", 16) \
  _(AES_192_CBC, "aes-192-cbc", 24) \
  _(AES_256_CBC, "aes-256-cbc", 32) \
  _(AES_128_CTR, "aes-128-ctr", 16) \
  _(AES_192_CTR, "aes-192-ctr", 24) \
  _(AES_256_CTR, "aes-256-ctr", 32)

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_aead_alg \
  _(AES_128_GCM, "aes-128-gcm", 16) \
  _(AES_192_GCM, "aes-192-gcm", 24) \
  _(AES_256_GCM, "aes-256-gcm", 32) \
  _(CHACHA20_POLY1305, "chacha20-poly1305", 32)

#define foreach_crypto_hmac_alg \
  _(MD5, "md5") \
  _(SHA1, "sha-1") \
  _(SHA224, "sha-224")  \
  _(SHA256, "sha-256")  \
  _(SHA384, "sha-384")  \
  _(SHA512, "sha-512")

#define foreach_crypto_op_type \
  _(ENCRYPT, "encrypt") \
  _(DECRYPT, "decrypt") \
  _(AEAD_ENCRYPT, "aead-encrypt") \
  _(AEAD_DECRYPT, "aead-decrypt") \
  _(HMAC, "hmac")

typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_TYPE_##n,
  foreach_crypto_op_type
#undef _
    VNET_CRYPTO_OP_N_TYPES,
} vnet_crypto_op_type_t;

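/* The foreach_* lists above are X-macros: each consumer defines _() to
 * emit one entry per algorithm or type, then undefines it. As a rough
 * illustrative sketch, the enum above expands to:
 *
 *   typedef enum
 *   {
 *     VNET_CRYPTO_OP_TYPE_ENCRYPT,
 *     VNET_CRYPTO_OP_TYPE_DECRYPT,
 *     VNET_CRYPTO_OP_TYPE_AEAD_ENCRYPT,
 *     VNET_CRYPTO_OP_TYPE_AEAD_DECRYPT,
 *     VNET_CRYPTO_OP_TYPE_HMAC,
 *     VNET_CRYPTO_OP_N_TYPES,
 *   } vnet_crypto_op_type_t;
 */
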
#define foreach_crypto_op_status \
  _(IDLE, "idle") \
  _(PENDING, "pending") \
  _(WORK_IN_PROGRESS, "work-in-progress") \
  _(COMPLETED, "completed") \
  _(FAIL_NO_HANDLER, "no-handler") \
  _(FAIL_BAD_HMAC, "bad-hmac") \
  _(FAIL_ENGINE_ERR, "engine-error")

/** async crypto **/

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, TAG_LEN, AAD_LEN */
#define foreach_crypto_aead_async_alg \
  _(AES_128_GCM, "aes-128-gcm-aad8", 16, 16, 8) \
  _(AES_128_GCM, "aes-128-gcm-aad12", 16, 16, 12) \
  _(AES_192_GCM, "aes-192-gcm-aad8", 24, 16, 8) \
  _(AES_192_GCM, "aes-192-gcm-aad12", 24, 16, 12) \
  _(AES_256_GCM, "aes-256-gcm-aad8", 32, 16, 8) \
  _(AES_256_GCM, "aes-256-gcm-aad12", 32, 16, 12) \
  _(CHACHA20_POLY1305, "chacha20-poly1305-aad8", 32, 16, 8) \
  _(CHACHA20_POLY1305, "chacha20-poly1305-aad12", 32, 16, 12)

/* CRYPTO_ID, INTEG_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, DIGEST_LEN */
#define foreach_crypto_link_async_alg                                         \
  _ (3DES_CBC, MD5, "3des-cbc-hmac-md5", 24, 12)                              \
  _ (AES_128_CBC, MD5, "aes-128-cbc-hmac-md5", 16, 12)                        \
  _ (AES_192_CBC, MD5, "aes-192-cbc-hmac-md5", 24, 12)                        \
  _ (AES_256_CBC, MD5, "aes-256-cbc-hmac-md5", 32, 12)                        \
  _ (3DES_CBC, SHA1, "3des-cbc-hmac-sha-1", 24, 12)                           \
  _ (AES_128_CBC, SHA1, "aes-128-cbc-hmac-sha-1", 16, 12)                     \
  _ (AES_192_CBC, SHA1, "aes-192-cbc-hmac-sha-1", 24, 12)                     \
  _ (AES_256_CBC, SHA1, "aes-256-cbc-hmac-sha-1", 32, 12)                     \
  _ (3DES_CBC, SHA224, "3des-cbc-hmac-sha-224", 24, 14)                       \
  _ (AES_128_CBC, SHA224, "aes-128-cbc-hmac-sha-224", 16, 14)                 \
  _ (AES_192_CBC, SHA224, "aes-192-cbc-hmac-sha-224", 24, 14)                 \
  _ (AES_256_CBC, SHA224, "aes-256-cbc-hmac-sha-224", 32, 14)                 \
  _ (3DES_CBC, SHA256, "3des-cbc-hmac-sha-256", 24, 16)                       \
  _ (AES_128_CBC, SHA256, "aes-128-cbc-hmac-sha-256", 16, 16)                 \
  _ (AES_192_CBC, SHA256, "aes-192-cbc-hmac-sha-256", 24, 16)                 \
  _ (AES_256_CBC, SHA256, "aes-256-cbc-hmac-sha-256", 32, 16)                 \
  _ (3DES_CBC, SHA384, "3des-cbc-hmac-sha-384", 24, 24)                       \
  _ (AES_128_CBC, SHA384, "aes-128-cbc-hmac-sha-384", 16, 24)                 \
  _ (AES_192_CBC, SHA384, "aes-192-cbc-hmac-sha-384", 24, 24)                 \
  _ (AES_256_CBC, SHA384, "aes-256-cbc-hmac-sha-384", 32, 24)                 \
  _ (3DES_CBC, SHA512, "3des-cbc-hmac-sha-512", 24, 32)                       \
  _ (AES_128_CBC, SHA512, "aes-128-cbc-hmac-sha-512", 16, 32)                 \
  _ (AES_192_CBC, SHA512, "aes-192-cbc-hmac-sha-512", 24, 32)                 \
  _ (AES_256_CBC, SHA512, "aes-256-cbc-hmac-sha-512", 32, 32)                 \
  _ (AES_128_CTR, SHA1, "aes-128-ctr-hmac-sha-1", 16, 12)                     \
  _ (AES_192_CTR, SHA1, "aes-192-ctr-hmac-sha-1", 24, 12)                     \
  _ (AES_256_CTR, SHA1, "aes-256-ctr-hmac-sha-1", 32, 12)

#define foreach_crypto_async_op_type \
  _(ENCRYPT, "async-encrypt") \
  _(DECRYPT, "async-decrypt")

typedef enum
{
  VNET_CRYPTO_KEY_OP_ADD,
  VNET_CRYPTO_KEY_OP_DEL,
  VNET_CRYPTO_KEY_OP_MODIFY,
} vnet_crypto_key_op_t;

typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_STATUS_##n,
  foreach_crypto_op_status
#undef _
    VNET_CRYPTO_OP_N_STATUS,
} vnet_crypto_op_status_t;

/* *INDENT-OFF* */
typedef enum
{
  VNET_CRYPTO_ALG_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_ALG_##n,
  foreach_crypto_cipher_alg
  foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_ALG_HMAC_##n,
  foreach_crypto_hmac_alg
#undef _
  VNET_CRYPTO_N_ALGS,
} vnet_crypto_alg_t;

typedef enum
{
#define _(n, s) VNET_CRYPTO_ASYNC_OP_TYPE_##n,
  foreach_crypto_async_op_type
#undef _
    VNET_CRYPTO_ASYNC_OP_N_TYPES,
} vnet_crypto_async_op_type_t;

typedef enum
{
  VNET_CRYPTO_ASYNC_ALG_NONE = 0,
#define _(n, s, k, t, a) \
  VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a,
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d) \
  VNET_CRYPTO_ALG_##c##_##h##_TAG##d,
  foreach_crypto_link_async_alg
#undef _
  VNET_CRYPTO_N_ASYNC_ALGS,
} vnet_crypto_async_alg_t;

typedef enum
{
  VNET_CRYPTO_ASYNC_OP_NONE = 0,
#define _(n, s, k, t, a) \
  VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
  VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d) \
  VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
  VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,
  foreach_crypto_link_async_alg
#undef _
  VNET_CRYPTO_ASYNC_OP_N_IDS,
} vnet_crypto_async_op_id_t;

typedef struct
{
  union
  {
    struct
    {
      u8 *data;
      vnet_crypto_alg_t alg:8;
    };
    struct
    {
      u32 index_crypto;
      u32 index_integ;
      vnet_crypto_async_alg_t async_alg:8;
    };
  };
#define VNET_CRYPTO_KEY_TYPE_DATA 0
#define VNET_CRYPTO_KEY_TYPE_LINK 1
  u8 type;
} vnet_crypto_key_t;

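/* A key is either raw key material (VNET_CRYPTO_KEY_TYPE_DATA) or, for
 * the linked algorithms, a pair of indices referencing two previously
 * added keys (VNET_CRYPTO_KEY_TYPE_LINK); the type member selects which
 * arm of the union is valid. */
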
typedef enum
{
  VNET_CRYPTO_OP_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_OP_##n##_ENC, VNET_CRYPTO_OP_##n##_DEC,
  foreach_crypto_cipher_alg
  foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_OP_##n##_HMAC,
  foreach_crypto_hmac_alg
#undef _
    VNET_CRYPTO_N_OP_IDS,
} vnet_crypto_op_id_t;
/* *INDENT-ON* */

typedef enum
{
  CRYPTO_OP_SIMPLE,
  CRYPTO_OP_CHAINED,
  CRYPTO_OP_BOTH,
} crypto_op_class_type_t;

typedef struct
{
  char *name;
  vnet_crypto_op_id_t op_by_type[VNET_CRYPTO_OP_N_TYPES];
} vnet_crypto_alg_data_t;

typedef struct
{
  u8 *src;
  u8 *dst;
  u32 len;
} vnet_crypto_op_chunk_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  uword user_data;
  vnet_crypto_op_id_t op:16;
  vnet_crypto_op_status_t status:8;
  u8 flags;
#define VNET_CRYPTO_OP_FLAG_INIT_IV (1 << 0)
#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK (1 << 1)
#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS (1 << 2)

  union
  {
    u8 digest_len;
    u8 tag_len;
  };
  u16 aad_len;

  union
  {
    struct
    {
      u8 *src;
      u8 *dst;
    };

    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
    u16 n_chunks;
  };

  union
  {
    u32 len;
    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
    u32 chunk_index;
  };

  u32 key_index;
  u8 *iv;
  u8 *aad;

  union
  {
    u8 *tag;
    u8 *digest;
  };
} vnet_crypto_op_t;

STATIC_ASSERT_SIZEOF (vnet_crypto_op_t, CLIB_CACHE_LINE_BYTES);

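/* Illustrative usage sketch for the synchronous path (plaintext,
 * ciphertext, iv, len and key_index are hypothetical variables, with
 * key_index obtained from vnet_crypto_key_add below): initialize an op,
 * point it at contiguous buffer data and a key, then hand the batch to
 * the active engine:
 *
 *   vnet_crypto_op_t op;
 *   vnet_crypto_op_init (&op, VNET_CRYPTO_OP_AES_128_CBC_ENC);
 *   op.src = plaintext;
 *   op.dst = ciphertext;
 *   op.len = len;
 *   op.iv = iv;
 *   op.key_index = key_index;
 *   vnet_crypto_process_ops (vm, &op, 1);
 *   ASSERT (op.status == VNET_CRYPTO_OP_STATUS_COMPLETED);
 */
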
typedef struct
{
  vnet_crypto_op_type_t type;
  vnet_crypto_alg_t alg;
  u32 active_engine_index_simple;
  u32 active_engine_index_chained;
} vnet_crypto_op_data_t;

typedef struct
{
  vnet_crypto_async_op_type_t type;
  vnet_crypto_async_alg_t alg;
  u32 active_engine_index_async;
} vnet_crypto_async_op_data_t;

typedef struct
{
  char *name;
  vnet_crypto_async_op_id_t op_by_type[VNET_CRYPTO_ASYNC_OP_N_TYPES];
} vnet_crypto_async_alg_data_t;

typedef struct
{
  u8 *iv;
  union
  {
    u8 *digest;
    u8 *tag;
  };
  u8 *aad;
  u32 key_index;
  u32 crypto_total_length;
  i16 crypto_start_offset; /* first buffer offset */
  i16 integ_start_offset;
  /* adjustment to total_length for integ, e.g. 4 bytes for IPsec ESN */
  u16 integ_length_adj;
  vnet_crypto_op_status_t status : 8;
  u8 flags; /**< share same VNET_CRYPTO_OP_FLAG_* values */
} vnet_crypto_async_frame_elt_t;

/* Assert the size so the compiler will warn us when it changes */
STATIC_ASSERT_SIZEOF (vnet_crypto_async_frame_elt_t, 5 * sizeof (u64));

typedef enum vnet_crypto_async_frame_state_t_
{
  VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED,
  /* frame waiting to be processed */
  VNET_CRYPTO_FRAME_STATE_PENDING,
  VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS,
  VNET_CRYPTO_FRAME_STATE_SUCCESS,
  VNET_CRYPTO_FRAME_STATE_ELT_ERROR
} __clib_packed vnet_crypto_async_frame_state_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  vnet_crypto_async_frame_state_t state;
  vnet_crypto_async_op_id_t op:8;
  u16 n_elts;
  vnet_crypto_async_frame_elt_t elts[VNET_CRYPTO_FRAME_SIZE];
  u32 buffer_indices[VNET_CRYPTO_FRAME_SIZE];
  u16 next_node_index[VNET_CRYPTO_FRAME_SIZE];
  u32 enqueue_thread_index;
} vnet_crypto_async_frame_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  vnet_crypto_async_frame_t *frame_pool;
  u32 *buffer_indices;
  u16 *nexts;
} vnet_crypto_thread_t;

typedef u32 vnet_crypto_key_index_t;

typedef u32 (vnet_crypto_chained_ops_handler_t) (vlib_main_t * vm,
                                                 vnet_crypto_op_t * ops[],
                                                 vnet_crypto_op_chunk_t *
                                                 chunks, u32 n_ops);

typedef u32 (vnet_crypto_ops_handler_t) (vlib_main_t * vm,
                                         vnet_crypto_op_t * ops[], u32 n_ops);

typedef void (vnet_crypto_key_handler_t) (vlib_main_t * vm,
                                          vnet_crypto_key_op_t kop,
                                          vnet_crypto_key_index_t idx);

/** async crypto function handlers **/
typedef int
  (vnet_crypto_frame_enqueue_t) (vlib_main_t * vm,
                                 vnet_crypto_async_frame_t * frame);
typedef vnet_crypto_async_frame_t *
  (vnet_crypto_frame_dequeue_t) (vlib_main_t * vm, u32 * nb_elts_processed,
                                 u32 * enqueue_thread_idx);

u32
vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
                             char *desc);

void vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
                                       vnet_crypto_op_id_t opt,
                                       vnet_crypto_ops_handler_t * oph);

void vnet_crypto_register_chained_ops_handler (vlib_main_t * vm,
                                               u32 engine_index,
                                               vnet_crypto_op_id_t opt,
                                               vnet_crypto_chained_ops_handler_t
                                               * oph);

void vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
                                        vnet_crypto_op_id_t opt,
                                        vnet_crypto_ops_handler_t * fn,
                                        vnet_crypto_chained_ops_handler_t *
                                        cfn);

void vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
                                       vnet_crypto_key_handler_t * keyh);

/** async crypto register functions */
u32 vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name);
void vnet_crypto_register_async_handler (vlib_main_t * vm,
                                         u32 engine_index,
                                         vnet_crypto_async_op_id_t opt,
                                         vnet_crypto_frame_enqueue_t * enq_fn,
                                         vnet_crypto_frame_dequeue_t *
                                         deq_fn);

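/* Illustrative engine registration sketch (my_engine_init,
 * my_aes_128_cbc_enc and my_key_handler are hypothetical): an engine
 * registers itself once, then attaches handlers per op id; the
 * highest-priority registration for an op becomes its active handler:
 *
 *   static clib_error_t *
 *   my_engine_init (vlib_main_t * vm)
 *   {
 *     u32 eidx = vnet_crypto_register_engine (vm, "my-engine", 100,
 *                                             "example engine");
 *     vnet_crypto_register_ops_handler (vm, eidx,
 *                                       VNET_CRYPTO_OP_AES_128_CBC_ENC,
 *                                       my_aes_128_cbc_enc);
 *     vnet_crypto_register_key_handler (vm, eidx, my_key_handler);
 *     return 0;
 *   }
 */
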
typedef struct
{
  char *name;
  char *desc;
  int priority;
  vnet_crypto_key_handler_t *key_op_handler;
  vnet_crypto_ops_handler_t *ops_handlers[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_chained_ops_handler_t
    *chained_ops_handlers[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_frame_enqueue_t *enqueue_handlers[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_frame_dequeue_t *dequeue_handlers[VNET_CRYPTO_ASYNC_OP_N_IDS];
} vnet_crypto_engine_t;

typedef struct
{
  u32 node_idx;
  u32 next_idx;
} vnet_crypto_async_next_node_t;

typedef struct
{
  vnet_crypto_alg_data_t *algs;
  vnet_crypto_thread_t *threads;
  vnet_crypto_ops_handler_t **ops_handlers;
  vnet_crypto_chained_ops_handler_t **chained_ops_handlers;
  vnet_crypto_frame_enqueue_t **enqueue_handlers;
  vnet_crypto_frame_dequeue_t **dequeue_handlers;
  clib_bitmap_t *async_active_ids;
  vnet_crypto_op_data_t opt_data[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_async_op_data_t async_opt_data[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_engine_t *engines;
  vnet_crypto_key_t *keys;
  uword *engine_index_by_name;
  uword *alg_index_by_name;
  uword *async_alg_index_by_name;
  vnet_crypto_async_alg_data_t *async_algs;
  u32 async_refcnt;
  vnet_crypto_async_next_node_t *next_nodes;
  u32 crypto_node_index;
#define VNET_CRYPTO_ASYNC_DISPATCH_POLLING 0
#define VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT 1
  u8 dispatch_mode;
} vnet_crypto_main_t;

extern vnet_crypto_main_t crypto_main;

u32 vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
                                     vnet_crypto_op_chunk_t * chunks,
                                     u32 n_ops);
u32 vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
                             u32 n_ops);

int vnet_crypto_set_handler2 (char *ops_handler_name, char *engine,
                              crypto_op_class_type_t oct);
int vnet_crypto_is_set_handler (vnet_crypto_alg_t alg);

u32 vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg,
                         u8 * data, u16 length);
void vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index);

/**
 * Combine two previously added keys (one cipher, one integrity) into a
 * new key for the linked algorithms. The returned key index may only be
 * used with linked algs.
 **/
u32 vnet_crypto_key_add_linked (vlib_main_t * vm,
                                vnet_crypto_key_index_t index_crypto,
                                vnet_crypto_key_index_t index_integ);

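/* Illustrative key setup sketch (key_data, hmac_key_data and
 * hmac_key_len are hypothetical): add a cipher key and an integrity
 * key, then link them for use with the async linked algorithms:
 *
 *   u32 ci = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_AES_128_CBC,
 *                                 key_data, 16);
 *   u32 ii = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_HMAC_SHA256,
 *                                 hmac_key_data, hmac_key_len);
 *   u32 li = vnet_crypto_key_add_linked (vm, ci, ii);
 *
 * li may then be used as the key_index of ops or frame elements whose
 * id is one of the VNET_CRYPTO_OP_AES_128_CBC_SHA256_TAG16_* ids.
 */
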
clib_error_t *crypto_dispatch_enable_disable (int is_enable);

int vnet_crypto_set_async_handler2 (char *alg_name, char *engine);

int vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t opt);

void vnet_crypto_request_async_mode (int is_enable);

void vnet_crypto_set_async_dispatch_mode (u8 mode);

vnet_crypto_async_alg_t vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
                                               vnet_crypto_alg_t integ_alg);

format_function_t format_vnet_crypto_alg;
format_function_t format_vnet_crypto_engine;
format_function_t format_vnet_crypto_op;
format_function_t format_vnet_crypto_op_type;
format_function_t format_vnet_crypto_op_status;
unformat_function_t unformat_vnet_crypto_alg;

format_function_t format_vnet_crypto_async_op;
format_function_t format_vnet_crypto_async_alg;
format_function_t format_vnet_crypto_async_op_type;

static_always_inline void
vnet_crypto_op_init (vnet_crypto_op_t * op, vnet_crypto_op_id_t type)
{
  if (CLIB_DEBUG > 0)
    clib_memset (op, 0xfe, sizeof (*op));
  op->op = type;
  op->flags = 0;
  op->key_index = ~0;
  op->n_chunks = 0;
}

static_always_inline vnet_crypto_op_type_t
vnet_crypto_get_op_type (vnet_crypto_op_id_t id)
{
  vnet_crypto_main_t *cm = &crypto_main;
  ASSERT (id < VNET_CRYPTO_N_OP_IDS);
  vnet_crypto_op_data_t *od = cm->opt_data + id;
  return od->type;
}

static_always_inline vnet_crypto_key_t *
vnet_crypto_get_key (vnet_crypto_key_index_t index)
{
  vnet_crypto_main_t *cm = &crypto_main;
  return vec_elt_at_index (cm->keys, index);
}

static_always_inline int
vnet_crypto_set_handler (char *alg_name, char *engine)
{
  return vnet_crypto_set_handler2 (alg_name, engine, CRYPTO_OP_BOTH);
}

/** async crypto inline functions **/

static_always_inline vnet_crypto_async_frame_t *
vnet_crypto_async_get_frame (vlib_main_t * vm, vnet_crypto_async_op_id_t opt)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
  vnet_crypto_async_frame_t *f = NULL;

  pool_get_aligned (ct->frame_pool, f, CLIB_CACHE_LINE_BYTES);
  if (CLIB_DEBUG > 0)
    clib_memset (f, 0xfe, sizeof (*f));
  f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
  f->op = opt;
  f->n_elts = 0;

  return f;
}

static_always_inline void
vnet_crypto_async_free_frame (vlib_main_t * vm,
                              vnet_crypto_async_frame_t * frame)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
  pool_put (ct->frame_pool, frame);
}

static_always_inline int
vnet_crypto_async_submit_open_frame (vlib_main_t * vm,
                                     vnet_crypto_async_frame_t * frame)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vnet_crypto_async_op_id_t opt = frame->op;
  u32 i = vlib_num_workers () > 0;

  frame->state = VNET_CRYPTO_FRAME_STATE_PENDING;
  frame->enqueue_thread_index = vm->thread_index;

  int ret = (cm->enqueue_handlers[frame->op]) (vm, frame);

  clib_bitmap_set_no_check (cm->async_active_ids, opt, 1);
  if (PREDICT_TRUE (ret == 0))
    {
      if (cm->dispatch_mode == VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT)
        {
          for (; i < tm->n_vlib_mains; i++)
            vlib_node_set_interrupt_pending (vlib_get_main_by_index (i),
                                             cm->crypto_node_index);
        }
    }
  else
    {
      frame->state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
    }

  return ret;
}

static_always_inline void
vnet_crypto_async_add_to_frame (vlib_main_t *vm, vnet_crypto_async_frame_t *f,
                                u32 key_index, u32 crypto_len,
                                i16 integ_len_adj, i16 crypto_start_offset,
                                u16 integ_start_offset, u32 buffer_index,
                                u16 next_node, u8 *iv, u8 *tag, u8 *aad,
                                u8 flags)
{
  vnet_crypto_async_frame_elt_t *fe;
  u16 index;

  ASSERT (f->n_elts < VNET_CRYPTO_FRAME_SIZE);

  index = f->n_elts;
  fe = &f->elts[index];
  f->n_elts++;
  fe->key_index = key_index;
  fe->crypto_total_length = crypto_len;
  fe->crypto_start_offset = crypto_start_offset;
  fe->integ_start_offset = integ_start_offset;
  fe->integ_length_adj = integ_len_adj;
  fe->iv = iv;
  fe->tag = tag;
  fe->aad = aad;
  fe->flags = flags;
  f->buffer_indices[index] = buffer_index;
  f->next_node_index[index] = next_node;
}

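/* Illustrative async enqueue sketch (key_index, crypto_len, the
 * offsets, bi, next_node, iv, tag and aad are hypothetical): grab a
 * frame for an op id, add one element per buffer until the frame is
 * full, then submit. Completed frames are later pulled by the crypto
 * dispatch node via the engine's dequeue handler, and each buffer is
 * handed to its recorded next node:
 *
 *   vnet_crypto_async_frame_t *f =
 *     vnet_crypto_async_get_frame (vm,
 *                                  VNET_CRYPTO_OP_AES_128_GCM_TAG16_AAD8_ENC);
 *   vnet_crypto_async_add_to_frame (vm, f, key_index, crypto_len,
 *                                   0, crypto_start_offset,
 *                                   integ_start_offset, bi, next_node,
 *                                   iv, tag, aad, 0);
 *   if (vnet_crypto_async_submit_open_frame (vm, f) < 0)
 *     vnet_crypto_async_free_frame (vm, f);   (and drop the buffers)
 */
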
static_always_inline void
vnet_crypto_async_reset_frame (vnet_crypto_async_frame_t * f)
{
  vnet_crypto_async_op_id_t opt;
  ASSERT (f != 0);
  ASSERT ((f->state == VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED
           || f->state == VNET_CRYPTO_FRAME_STATE_ELT_ERROR));
  opt = f->op;
  if (CLIB_DEBUG > 0)
    clib_memset (f, 0xfe, sizeof (*f));
  f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
  f->op = opt;
  f->n_elts = 0;
}

static_always_inline u8
vnet_crypto_async_frame_is_full (const vnet_crypto_async_frame_t *f)
{
  return (f->n_elts == VNET_CRYPTO_FRAME_SIZE);
}

#endif /* included_vnet_crypto_crypto_h */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */