/*
 * Copyright (c) 2019 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef included_vnet_crypto_crypto_h
#define included_vnet_crypto_crypto_h

#include <vlib/vlib.h>

#define VNET_CRYPTO_FRAME_SIZE 64
#define VNET_CRYPTO_FRAME_POOL_SIZE 1024

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_cipher_alg \
  _(DES_CBC,     "des-cbc", 7) \
  _(3DES_CBC,    "3des-cbc", 24) \
  _(AES_128_CBC, "aes-128-cbc", 16) \
  _(AES_192_CBC, "aes-192-cbc", 24) \
  _(AES_256_CBC, "aes-256-cbc", 32) \
  _(AES_128_CTR, "aes-128-ctr", 16) \
  _(AES_192_CTR, "aes-192-ctr", 24) \
  _(AES_256_CTR, "aes-256-ctr", 32)

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_aead_alg \
  _(AES_128_GCM, "aes-128-gcm", 16) \
  _(AES_192_GCM, "aes-192-gcm", 24) \
  _(AES_256_GCM, "aes-256-gcm", 32) \
  _(CHACHA20_POLY1305, "chacha20-poly1305", 32)

#define foreach_crypto_hmac_alg \
  _(MD5, "md5") \
  _(SHA1, "sha-1") \
  _(SHA224, "sha-224")  \
  _(SHA256, "sha-256")  \
  _(SHA384, "sha-384")  \
  _(SHA512, "sha-512")

#define foreach_crypto_op_type \
  _(ENCRYPT, "encrypt") \
  _(DECRYPT, "decrypt") \
  _(AEAD_ENCRYPT, "aead-encrypt") \
  _(AEAD_DECRYPT, "aead-decrypt") \
  _(HMAC, "hmac")

typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_TYPE_##n,
  foreach_crypto_op_type
#undef _
    VNET_CRYPTO_OP_N_TYPES,
} vnet_crypto_op_type_t;

#define foreach_crypto_op_status \
  _(IDLE, "idle") \
  _(PENDING, "pending") \
  _(WORK_IN_PROGRESS, "work-in-progress") \
  _(COMPLETED, "completed") \
  _(FAIL_NO_HANDLER, "no-handler") \
  _(FAIL_BAD_HMAC, "bad-hmac") \
  _(FAIL_ENGINE_ERR, "engine-error")

/** async crypto **/

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, TAG_LEN, AAD_LEN */
#define foreach_crypto_aead_async_alg \
  _(AES_128_GCM, "aes-128-gcm-aad8", 16, 16, 8) \
  _(AES_128_GCM, "aes-128-gcm-aad12", 16, 16, 12) \
  _(AES_192_GCM, "aes-192-gcm-aad8", 24, 16, 8) \
  _(AES_192_GCM, "aes-192-gcm-aad12", 24, 16, 12) \
  _(AES_256_GCM, "aes-256-gcm-aad8", 32, 16, 8) \
  _(AES_256_GCM, "aes-256-gcm-aad12", 32, 16, 12) \
  _(CHACHA20_POLY1305, "chacha20-poly1305-aad8", 32, 16, 8) \
  _(CHACHA20_POLY1305, "chacha20-poly1305-aad12", 32, 16, 12)

/* CRYPTO_ID, INTEG_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, DIGEST_LEN */
#define foreach_crypto_link_async_alg                                         \
  _ (AES_128_CBC, SHA1, "aes-128-cbc-hmac-sha-1", 16, 12)                     \
  _ (AES_192_CBC, SHA1, "aes-192-cbc-hmac-sha-1", 24, 12)                     \
  _ (AES_256_CBC, SHA1, "aes-256-cbc-hmac-sha-1", 32, 12)                     \
  _ (AES_128_CBC, SHA224, "aes-128-cbc-hmac-sha-224", 16, 14)                 \
  _ (AES_192_CBC, SHA224, "aes-192-cbc-hmac-sha-224", 24, 14)                 \
  _ (AES_256_CBC, SHA224, "aes-256-cbc-hmac-sha-224", 32, 14)                 \
  _ (AES_128_CBC, SHA256, "aes-128-cbc-hmac-sha-256", 16, 16)                 \
  _ (AES_192_CBC, SHA256, "aes-192-cbc-hmac-sha-256", 24, 16)                 \
  _ (AES_256_CBC, SHA256, "aes-256-cbc-hmac-sha-256", 32, 16)                 \
  _ (AES_128_CBC, SHA384, "aes-128-cbc-hmac-sha-384", 16, 24)                 \
  _ (AES_192_CBC, SHA384, "aes-192-cbc-hmac-sha-384", 24, 24)                 \
  _ (AES_256_CBC, SHA384, "aes-256-cbc-hmac-sha-384", 32, 24)                 \
  _ (AES_128_CBC, SHA512, "aes-128-cbc-hmac-sha-512", 16, 32)                 \
  _ (AES_192_CBC, SHA512, "aes-192-cbc-hmac-sha-512", 24, 32)                 \
  _ (AES_256_CBC, SHA512, "aes-256-cbc-hmac-sha-512", 32, 32)                 \
  _ (AES_128_CTR, SHA1, "aes-128-ctr-hmac-sha-1", 16, 12)                     \
  _ (AES_192_CTR, SHA1, "aes-192-ctr-hmac-sha-1", 24, 12)                     \
  _ (AES_256_CTR, SHA1, "aes-256-ctr-hmac-sha-1", 32, 12)

#define foreach_crypto_async_op_type \
  _(ENCRYPT, "async-encrypt") \
  _(DECRYPT, "async-decrypt")

typedef enum
{
  VNET_CRYPTO_KEY_OP_ADD,
  VNET_CRYPTO_KEY_OP_DEL,
  VNET_CRYPTO_KEY_OP_MODIFY,
} vnet_crypto_key_op_t;

typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_STATUS_##n,
  foreach_crypto_op_status
#undef _
    VNET_CRYPTO_OP_N_STATUS,
} vnet_crypto_op_status_t;

/* *INDENT-OFF* */
typedef enum
{
  VNET_CRYPTO_ALG_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_ALG_##n,
  foreach_crypto_cipher_alg
  foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_ALG_HMAC_##n,
  foreach_crypto_hmac_alg
#undef _
  VNET_CRYPTO_N_ALGS,
} vnet_crypto_alg_t;
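
/*
 * The foreach_* X-macros above expand a caller-supplied _() once per
 * algorithm. For example, the cipher and HMAC expansions directly above
 * produce enum entries such as:
 *
 *   VNET_CRYPTO_ALG_NONE = 0,
 *   VNET_CRYPTO_ALG_DES_CBC,
 *   VNET_CRYPTO_ALG_3DES_CBC,
 *   VNET_CRYPTO_ALG_AES_128_CBC,
 *   ...
 *   VNET_CRYPTO_ALG_HMAC_MD5,
 *   VNET_CRYPTO_ALG_HMAC_SHA1,
 *   ...
 */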

typedef enum
{
#define _(n, s) VNET_CRYPTO_ASYNC_OP_TYPE_##n,
  foreach_crypto_async_op_type
#undef _
    VNET_CRYPTO_ASYNC_OP_N_TYPES,
} vnet_crypto_async_op_type_t;

typedef enum
{
  VNET_CRYPTO_ASYNC_ALG_NONE = 0,
#define _(n, s, k, t, a) \
  VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a,
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d) \
  VNET_CRYPTO_ALG_##c##_##h##_TAG##d,
  foreach_crypto_link_async_alg
#undef _
  VNET_CRYPTO_N_ASYNC_ALGS,
} vnet_crypto_async_alg_t;

typedef enum
{
  VNET_CRYPTO_ASYNC_OP_NONE = 0,
#define _(n, s, k, t, a) \
  VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
  VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d) \
  VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
  VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,
  foreach_crypto_link_async_alg
#undef _
  VNET_CRYPTO_ASYNC_OP_N_IDS,
} vnet_crypto_async_op_id_t;

typedef struct
{
  union
  {
    struct
    {
      u8 *data;
      vnet_crypto_alg_t alg:8;
    };
    struct
    {
      u32 index_crypto;
      u32 index_integ;
      vnet_crypto_async_alg_t async_alg:8;
    };
  };
#define VNET_CRYPTO_KEY_TYPE_DATA 0
#define VNET_CRYPTO_KEY_TYPE_LINK 1
  u8 type;
} vnet_crypto_key_t;

typedef enum
{
  VNET_CRYPTO_OP_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_OP_##n##_ENC, VNET_CRYPTO_OP_##n##_DEC,
  foreach_crypto_cipher_alg
  foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_OP_##n##_HMAC,
  foreach_crypto_hmac_alg
#undef _
    VNET_CRYPTO_N_OP_IDS,
} vnet_crypto_op_id_t;
/* *INDENT-ON* */

typedef enum
{
  CRYPTO_OP_SIMPLE,
  CRYPTO_OP_CHAINED,
  CRYPTO_OP_BOTH,
} crypto_op_class_type_t;

typedef struct
{
  char *name;
  vnet_crypto_op_id_t op_by_type[VNET_CRYPTO_OP_N_TYPES];
} vnet_crypto_alg_data_t;

typedef struct
{
  u8 *src;
  u8 *dst;
  u32 len;
} vnet_crypto_op_chunk_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  uword user_data;
  vnet_crypto_op_id_t op:16;
  vnet_crypto_op_status_t status:8;
  u8 flags;
#define VNET_CRYPTO_OP_FLAG_INIT_IV (1 << 0)
#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK (1 << 1)
#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS (1 << 2)

  union
  {
    u8 digest_len;
    u8 tag_len;
  };
  u16 aad_len;

  union
  {
    struct
    {
      u8 *src;
      u8 *dst;
    };

    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
    u16 n_chunks;
  };

  union
  {
    u32 len;
    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
    u32 chunk_index;
  };

  u32 key_index;
  u8 *iv;
  u8 *aad;

  union
  {
    u8 *tag;
    u8 *digest;
  };
} vnet_crypto_op_t;

STATIC_ASSERT_SIZEOF (vnet_crypto_op_t, CLIB_CACHE_LINE_BYTES);
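
/*
 * A minimal sketch (variable names illustrative) of a chained op, i.e. data
 * spread over multiple buffers: src/dst/len are unused and the data is
 * described instead by a caller-maintained vector of vnet_crypto_op_chunk_t,
 * consumed by vnet_crypto_process_chained_ops() declared further below.
 *
 *   vnet_crypto_op_chunk_t *chunks = 0, *ch;
 *   vnet_crypto_op_t op;
 *
 *   vnet_crypto_op_init (&op, VNET_CRYPTO_OP_AES_128_CBC_ENC);
 *   op.key_index = key_index;
 *   op.iv = iv;
 *   op.flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
 *   op.chunk_index = vec_len (chunks);
 *   vec_add2 (chunks, ch, 1);
 *   ch->src = ch->dst = vlib_buffer_get_current (b);
 *   ch->len = b->current_length;
 *   op.n_chunks = 1;
 *   vnet_crypto_process_chained_ops (vm, &op, chunks, 1);
 */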

typedef struct
{
  vnet_crypto_op_type_t type;
  vnet_crypto_alg_t alg;
  u32 active_engine_index_simple;
  u32 active_engine_index_chained;
} vnet_crypto_op_data_t;

typedef struct
{
  vnet_crypto_async_op_type_t type;
  vnet_crypto_async_alg_t alg;
  u32 active_engine_index_async;
} vnet_crypto_async_op_data_t;

typedef struct
{
  char *name;
  vnet_crypto_async_op_id_t op_by_type[VNET_CRYPTO_ASYNC_OP_N_TYPES];
} vnet_crypto_async_alg_data_t;

typedef struct
{
  u8 *iv;
  union
  {
    u8 *digest;
    u8 *tag;
  };
  u8 *aad;
  u32 key_index;
  u32 crypto_total_length;
  i16 crypto_start_offset; /* first buffer offset */
  i16 integ_start_offset;
  /* adjustment to total_length for integrity, e.g. 4 bytes for IPsec ESN */
  u16 integ_length_adj;
  vnet_crypto_op_status_t status : 8;
  u8 flags; /**< share same VNET_CRYPTO_OP_FLAG_* values */
} vnet_crypto_async_frame_elt_t;

/* Assert the size so the compiler will warn us when it changes */
STATIC_ASSERT_SIZEOF (vnet_crypto_async_frame_elt_t, 5 * sizeof (u64));

typedef enum vnet_crypto_async_frame_state_t_
{
  VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED,
  /* frame waiting to be processed */
  VNET_CRYPTO_FRAME_STATE_PENDING,
  VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS,
  VNET_CRYPTO_FRAME_STATE_SUCCESS,
  VNET_CRYPTO_FRAME_STATE_ELT_ERROR
} __clib_packed vnet_crypto_async_frame_state_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  vnet_crypto_async_frame_state_t state;
  vnet_crypto_async_op_id_t op:8;
  u16 n_elts;
  vnet_crypto_async_frame_elt_t elts[VNET_CRYPTO_FRAME_SIZE];
  u32 buffer_indices[VNET_CRYPTO_FRAME_SIZE];
  u16 next_node_index[VNET_CRYPTO_FRAME_SIZE];
  u32 enqueue_thread_index;
} vnet_crypto_async_frame_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  vnet_crypto_async_frame_t *frame_pool;
  u32 *buffer_indices;
  u16 *nexts;
} vnet_crypto_thread_t;

typedef u32 vnet_crypto_key_index_t;

typedef u32 (vnet_crypto_chained_ops_handler_t) (vlib_main_t * vm,
                                                 vnet_crypto_op_t * ops[],
                                                 vnet_crypto_op_chunk_t *
                                                 chunks, u32 n_ops);

typedef u32 (vnet_crypto_ops_handler_t) (vlib_main_t * vm,
                                         vnet_crypto_op_t * ops[], u32 n_ops);

typedef void (vnet_crypto_key_handler_t) (vlib_main_t * vm,
                                          vnet_crypto_key_op_t kop,
                                          vnet_crypto_key_index_t idx);

/** async crypto function handlers **/
typedef int
  (vnet_crypto_frame_enqueue_t) (vlib_main_t * vm,
                                 vnet_crypto_async_frame_t * frame);
typedef vnet_crypto_async_frame_t *
  (vnet_crypto_frame_dequeue_t) (vlib_main_t * vm, u32 * nb_elts_processed,
                                 u32 * enqueue_thread_idx);

u32
vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
                             char *desc);

void vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
                                       vnet_crypto_op_id_t opt,
                                       vnet_crypto_ops_handler_t * oph);

void vnet_crypto_register_chained_ops_handler (vlib_main_t * vm,
                                               u32 engine_index,
                                               vnet_crypto_op_id_t opt,
                                               vnet_crypto_chained_ops_handler_t
                                               * oph);

void vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
                                        vnet_crypto_op_id_t opt,
                                        vnet_crypto_ops_handler_t * fn,
                                        vnet_crypto_chained_ops_handler_t *
                                        cfn);

void vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
                                       vnet_crypto_key_handler_t * keyh);
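
/*
 * Typical engine bring-up (a sketch; the engine name and handler symbols
 * are illustrative): register the engine once, then attach handlers for
 * each op id it supports.
 *
 *   u32 eidx = vnet_crypto_register_engine (vm, "my-engine", 100,
 *                                           "my engine description");
 *   vnet_crypto_register_ops_handler (vm, eidx,
 *                                     VNET_CRYPTO_OP_AES_128_CBC_ENC,
 *                                     my_aes_128_cbc_enc_fn);
 *   vnet_crypto_register_key_handler (vm, eidx, my_key_fn);
 */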

/** async crypto register functions */
u32 vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name);
void vnet_crypto_register_async_handler (vlib_main_t * vm,
                                         u32 engine_index,
                                         vnet_crypto_async_op_id_t opt,
                                         vnet_crypto_frame_enqueue_t * enq_fn,
                                         vnet_crypto_frame_dequeue_t *
                                         deq_fn);

typedef struct
{
  char *name;
  char *desc;
  int priority;
  vnet_crypto_key_handler_t *key_op_handler;
  vnet_crypto_ops_handler_t *ops_handlers[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_chained_ops_handler_t
    *chained_ops_handlers[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_frame_enqueue_t *enqueue_handlers[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_frame_dequeue_t *dequeue_handlers[VNET_CRYPTO_ASYNC_OP_N_IDS];
} vnet_crypto_engine_t;

typedef struct
{
  u32 node_idx;
  u32 next_idx;
} vnet_crypto_async_next_node_t;

typedef struct
{
  vnet_crypto_alg_data_t *algs;
  vnet_crypto_thread_t *threads;
  vnet_crypto_ops_handler_t **ops_handlers;
  vnet_crypto_chained_ops_handler_t **chained_ops_handlers;
  vnet_crypto_frame_enqueue_t **enqueue_handlers;
  vnet_crypto_frame_dequeue_t **dequeue_handlers;
  clib_bitmap_t *async_active_ids;
  vnet_crypto_op_data_t opt_data[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_async_op_data_t async_opt_data[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_engine_t *engines;
  vnet_crypto_key_t *keys;
  uword *engine_index_by_name;
  uword *alg_index_by_name;
  uword *async_alg_index_by_name;
  vnet_crypto_async_alg_data_t *async_algs;
  u32 async_refcnt;
  vnet_crypto_async_next_node_t *next_nodes;
  u32 crypto_node_index;
#define VNET_CRYPTO_ASYNC_DISPATCH_POLLING 0
#define VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT 1
  u8 dispatch_mode;
} vnet_crypto_main_t;

extern vnet_crypto_main_t crypto_main;

u32 vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
                                     vnet_crypto_op_chunk_t * chunks,
                                     u32 n_ops);
u32 vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
                             u32 n_ops);
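
/*
 * A minimal synchronous usage sketch (variable names illustrative);
 * vnet_crypto_op_init() is defined later in this file:
 *
 *   vnet_crypto_op_t op;
 *
 *   vnet_crypto_op_init (&op, VNET_CRYPTO_OP_AES_128_CBC_ENC);
 *   op.key_index = key_index;
 *   op.iv = iv;
 *   op.src = src;
 *   op.dst = dst;
 *   op.len = len;
 *   vnet_crypto_process_ops (vm, &op, 1);
 *   if (op.status != VNET_CRYPTO_OP_STATUS_COMPLETED)
 *     ...handle failure...
 */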

int vnet_crypto_set_handler2 (char *ops_handler_name, char *engine,
                              crypto_op_class_type_t oct);
int vnet_crypto_is_set_handler (vnet_crypto_alg_t alg);

u32 vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg,
                         u8 * data, u16 length);
void vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index);

/**
 * Combine two previously added keys into a new key for linked algorithms
 * (cipher + integ). The returned key index is valid for linked algs only.
 **/
u32 vnet_crypto_key_add_linked (vlib_main_t * vm,
                                vnet_crypto_key_index_t index_crypto,
                                vnet_crypto_key_index_t index_integ);
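
/*
 * Key management sketch (key data and lengths illustrative): add the
 * cipher and integrity keys first, then link them to obtain a key index
 * usable with the linked (cipher + integ) async op ids.
 *
 *   u32 ki_crypto = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_AES_128_CBC,
 *                                        key_data, 16);
 *   u32 ki_integ = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_HMAC_SHA256,
 *                                       hmac_key_data, hmac_key_len);
 *   u32 ki_linked = vnet_crypto_key_add_linked (vm, ki_crypto, ki_integ);
 */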

clib_error_t *crypto_dispatch_enable_disable (int is_enable);

int vnet_crypto_set_async_handler2 (char *alg_name, char *engine);

int vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t opt);

void vnet_crypto_request_async_mode (int is_enable);

void vnet_crypto_set_async_dispatch_mode (u8 mode);

vnet_crypto_async_alg_t vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
                                               vnet_crypto_alg_t integ_alg);

format_function_t format_vnet_crypto_alg;
format_function_t format_vnet_crypto_engine;
format_function_t format_vnet_crypto_op;
format_function_t format_vnet_crypto_op_type;
format_function_t format_vnet_crypto_op_status;
unformat_function_t unformat_vnet_crypto_alg;

format_function_t format_vnet_crypto_async_op;
format_function_t format_vnet_crypto_async_alg;
format_function_t format_vnet_crypto_async_op_type;

static_always_inline void
vnet_crypto_op_init (vnet_crypto_op_t * op, vnet_crypto_op_id_t type)
{
  if (CLIB_DEBUG > 0)
    clib_memset (op, 0xfe, sizeof (*op));
  op->op = type;
  op->flags = 0;
  op->key_index = ~0;
  op->n_chunks = 0;
}

static_always_inline vnet_crypto_op_type_t
vnet_crypto_get_op_type (vnet_crypto_op_id_t id)
{
  vnet_crypto_main_t *cm = &crypto_main;
  ASSERT (id < VNET_CRYPTO_N_OP_IDS);
  vnet_crypto_op_data_t *od = cm->opt_data + id;
  return od->type;
}

static_always_inline vnet_crypto_key_t *
vnet_crypto_get_key (vnet_crypto_key_index_t index)
{
  vnet_crypto_main_t *cm = &crypto_main;
  return vec_elt_at_index (cm->keys, index);
}

static_always_inline int
vnet_crypto_set_handler (char *alg_name, char *engine)
{
  return vnet_crypto_set_handler2 (alg_name, engine, CRYPTO_OP_BOTH);
}

/** async crypto inline functions **/

static_always_inline vnet_crypto_async_frame_t *
vnet_crypto_async_get_frame (vlib_main_t * vm, vnet_crypto_async_op_id_t opt)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
  vnet_crypto_async_frame_t *f = NULL;

  pool_get_aligned (ct->frame_pool, f, CLIB_CACHE_LINE_BYTES);
  if (CLIB_DEBUG > 0)
    clib_memset (f, 0xfe, sizeof (*f));
  f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
  f->op = opt;
  f->n_elts = 0;

  return f;
}

static_always_inline void
vnet_crypto_async_free_frame (vlib_main_t * vm,
                              vnet_crypto_async_frame_t * frame)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
  pool_put (ct->frame_pool, frame);
}

static_always_inline int
vnet_crypto_async_submit_open_frame (vlib_main_t * vm,
                                     vnet_crypto_async_frame_t * frame)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vnet_crypto_async_op_id_t opt = frame->op;
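  /* when workers are present, request interrupts on worker threads only
     (start at thread index 1), otherwise on the main thread (index 0) */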
  u32 i = vlib_num_workers () > 0;

  frame->state = VNET_CRYPTO_FRAME_STATE_PENDING;
  frame->enqueue_thread_index = vm->thread_index;

  int ret = (cm->enqueue_handlers[frame->op]) (vm, frame);

  clib_bitmap_set_no_check (cm->async_active_ids, opt, 1);
  if (PREDICT_TRUE (ret == 0))
    {
      if (cm->dispatch_mode == VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT)
        {
          for (; i < tm->n_vlib_mains; i++)
            vlib_node_set_interrupt_pending (vlib_get_main_by_index (i),
                                             cm->crypto_node_index);
        }
    }
  else
    {
      frame->state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
    }

  return ret;
}

static_always_inline void
vnet_crypto_async_add_to_frame (vlib_main_t *vm, vnet_crypto_async_frame_t *f,
                                u32 key_index, u32 crypto_len,
                                i16 integ_len_adj, i16 crypto_start_offset,
                                i16 integ_start_offset, u32 buffer_index,
                                u16 next_node, u8 *iv, u8 *tag, u8 *aad,
                                u8 flags)
{
  vnet_crypto_async_frame_elt_t *fe;
  u16 index;

  ASSERT (f->n_elts < VNET_CRYPTO_FRAME_SIZE);

  index = f->n_elts;
  fe = &f->elts[index];
  f->n_elts++;
  fe->key_index = key_index;
  fe->crypto_total_length = crypto_len;
  fe->crypto_start_offset = crypto_start_offset;
  fe->integ_start_offset = integ_start_offset;
  fe->integ_length_adj = integ_len_adj;
  fe->iv = iv;
  fe->tag = tag;
  fe->aad = aad;
  fe->flags = flags;
  f->buffer_indices[index] = buffer_index;
  f->next_node_index[index] = next_node;
}

static_always_inline void
vnet_crypto_async_reset_frame (vnet_crypto_async_frame_t * f)
{
  vnet_crypto_async_op_id_t opt;
  ASSERT (f != 0);
  ASSERT ((f->state == VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED
           || f->state == VNET_CRYPTO_FRAME_STATE_ELT_ERROR));
  opt = f->op;
  if (CLIB_DEBUG > 0)
    clib_memset (f, 0xfe, sizeof (*f));
  f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
  f->op = opt;
  f->n_elts = 0;
}

static_always_inline u8
vnet_crypto_async_frame_is_full (const vnet_crypto_async_frame_t *f)
{
  return (f->n_elts == VNET_CRYPTO_FRAME_SIZE);
}
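
/*
 * End-to-end async sketch (error handling elided, names illustrative):
 * get a frame for the desired op id, add one element per buffer, and
 * submit once the frame fills up; completed buffers are handed to the
 * registered post node by the crypto dispatch node.
 *
 *   vnet_crypto_async_frame_t *f =
 *     vnet_crypto_async_get_frame (vm,
 *                                  VNET_CRYPTO_OP_AES_128_GCM_TAG16_AAD12_ENC);
 *
 *   vnet_crypto_async_add_to_frame (vm, f, key_index, crypto_len,
 *                                   integ_len_adj, crypto_start_offset,
 *                                   integ_start_offset, buffer_index,
 *                                   next_node_index, iv, tag, aad, flags);
 *   if (vnet_crypto_async_frame_is_full (f))
 *     vnet_crypto_async_submit_open_frame (vm, f);
 */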

#endif /* included_vnet_crypto_crypto_h */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */