crypto: add support for aes-ctr+sha-1 chains
src/vnet/crypto/crypto.h
/*
 * Copyright (c) 2019 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef included_vnet_crypto_crypto_h
#define included_vnet_crypto_crypto_h

#include <vlib/vlib.h>

#define VNET_CRYPTO_FRAME_SIZE 64

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_cipher_alg \
  _(DES_CBC,     "des-cbc", 7) \
  _(3DES_CBC,    "3des-cbc", 24) \
  _(AES_128_CBC, "aes-128-cbc", 16) \
  _(AES_192_CBC, "aes-192-cbc", 24) \
  _(AES_256_CBC, "aes-256-cbc", 32) \
  _(AES_128_CTR, "aes-128-ctr", 16) \
  _(AES_192_CTR, "aes-192-ctr", 24) \
  _(AES_256_CTR, "aes-256-ctr", 32)

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_aead_alg \
  _(AES_128_GCM, "aes-128-gcm", 16) \
  _(AES_192_GCM, "aes-192-gcm", 24) \
  _(AES_256_GCM, "aes-256-gcm", 32) \
  _(CHACHA20_POLY1305, "chacha20-poly1305", 32)

#define foreach_crypto_hmac_alg \
  _(MD5, "md5") \
  _(SHA1, "sha-1") \
  _(SHA224, "sha-224")  \
  _(SHA256, "sha-256")  \
  _(SHA384, "sha-384")  \
  _(SHA512, "sha-512")

#define foreach_crypto_op_type \
  _(ENCRYPT, "encrypt") \
  _(DECRYPT, "decrypt") \
  _(AEAD_ENCRYPT, "aead-encrypt") \
  _(AEAD_DECRYPT, "aead-decrypt") \
  _(HMAC, "hmac")

typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_TYPE_##n,
  foreach_crypto_op_type
#undef _
    VNET_CRYPTO_OP_N_TYPES,
} vnet_crypto_op_type_t;

#define foreach_crypto_op_status \
  _(IDLE, "idle") \
  _(PENDING, "pending") \
  _(WORK_IN_PROGRESS, "work-in-progress") \
  _(COMPLETED, "completed") \
  _(FAIL_NO_HANDLER, "no-handler") \
  _(FAIL_BAD_HMAC, "bad-hmac") \
  _(FAIL_ENGINE_ERR, "engine-error")

/** async crypto **/

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, TAG_LEN, AAD_LEN */
#define foreach_crypto_aead_async_alg \
  _(AES_128_GCM, "aes-128-gcm-aad8", 16, 16, 8) \
  _(AES_128_GCM, "aes-128-gcm-aad12", 16, 16, 12) \
  _(AES_192_GCM, "aes-192-gcm-aad8", 24, 16, 8) \
  _(AES_192_GCM, "aes-192-gcm-aad12", 24, 16, 12) \
  _(AES_256_GCM, "aes-256-gcm-aad8", 32, 16, 8) \
  _(AES_256_GCM, "aes-256-gcm-aad12", 32, 16, 12) \
  _(CHACHA20_POLY1305, "chacha20-poly1305-aad8", 32, 16, 8) \
  _(CHACHA20_POLY1305, "chacha20-poly1305-aad12", 32, 16, 12)

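/*
 * Illustrative note: each entry above yields one async alg id plus an
 * encrypt/decrypt op id pair, with the tag and aad lengths folded into
 * the symbol name. For instance, the "aes-128-gcm-aad8" entry expands
 * (via the enums further below) to:
 *
 *   VNET_CRYPTO_ALG_AES_128_GCM_TAG16_AAD8
 *   VNET_CRYPTO_OP_AES_128_GCM_TAG16_AAD8_ENC
 *   VNET_CRYPTO_OP_AES_128_GCM_TAG16_AAD8_DEC
 */
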
/* CRYPTO_ID, INTEG_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, DIGEST_LEN */
#define foreach_crypto_link_async_alg                                         \
  _ (AES_128_CBC, SHA1, "aes-128-cbc-hmac-sha-1", 16, 12)                     \
  _ (AES_192_CBC, SHA1, "aes-192-cbc-hmac-sha-1", 24, 12)                     \
  _ (AES_256_CBC, SHA1, "aes-256-cbc-hmac-sha-1", 32, 12)                     \
  _ (AES_128_CBC, SHA224, "aes-128-cbc-hmac-sha-224", 16, 14)                 \
  _ (AES_192_CBC, SHA224, "aes-192-cbc-hmac-sha-224", 24, 14)                 \
  _ (AES_256_CBC, SHA224, "aes-256-cbc-hmac-sha-224", 32, 14)                 \
  _ (AES_128_CBC, SHA256, "aes-128-cbc-hmac-sha-256", 16, 16)                 \
  _ (AES_192_CBC, SHA256, "aes-192-cbc-hmac-sha-256", 24, 16)                 \
  _ (AES_256_CBC, SHA256, "aes-256-cbc-hmac-sha-256", 32, 16)                 \
  _ (AES_128_CBC, SHA384, "aes-128-cbc-hmac-sha-384", 16, 24)                 \
  _ (AES_192_CBC, SHA384, "aes-192-cbc-hmac-sha-384", 24, 24)                 \
  _ (AES_256_CBC, SHA384, "aes-256-cbc-hmac-sha-384", 32, 24)                 \
  _ (AES_128_CBC, SHA512, "aes-128-cbc-hmac-sha-512", 16, 32)                 \
  _ (AES_192_CBC, SHA512, "aes-192-cbc-hmac-sha-512", 24, 32)                 \
  _ (AES_256_CBC, SHA512, "aes-256-cbc-hmac-sha-512", 32, 32)                 \
  _ (AES_128_CTR, SHA1, "aes-128-ctr-hmac-sha-1", 16, 12)                     \
  _ (AES_192_CTR, SHA1, "aes-192-ctr-hmac-sha-1", 24, 12)                     \
  _ (AES_256_CTR, SHA1, "aes-256-ctr-hmac-sha-1", 32, 12)

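/*
 * Illustrative note: the aes-ctr entries added here give each ctr+sha-1
 * chain its own linked alg id and op id pair; e.g. the
 * "aes-128-ctr-hmac-sha-1" entry expands (via the enums below) to:
 *
 *   VNET_CRYPTO_ALG_AES_128_CTR_SHA1_TAG12
 *   VNET_CRYPTO_OP_AES_128_CTR_SHA1_TAG12_ENC
 *   VNET_CRYPTO_OP_AES_128_CTR_SHA1_TAG12_DEC
 */
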
#define foreach_crypto_async_op_type \
  _(ENCRYPT, "async-encrypt") \
  _(DECRYPT, "async-decrypt")

typedef enum
{
  VNET_CRYPTO_KEY_OP_ADD,
  VNET_CRYPTO_KEY_OP_DEL,
  VNET_CRYPTO_KEY_OP_MODIFY,
} vnet_crypto_key_op_t;

typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_STATUS_##n,
  foreach_crypto_op_status
#undef _
    VNET_CRYPTO_OP_N_STATUS,
} vnet_crypto_op_status_t;

/* *INDENT-OFF* */
typedef enum
{
  VNET_CRYPTO_ALG_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_ALG_##n,
  foreach_crypto_cipher_alg
  foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_ALG_HMAC_##n,
  foreach_crypto_hmac_alg
#undef _
  VNET_CRYPTO_N_ALGS,
} vnet_crypto_alg_t;

typedef enum
{
#define _(n, s) VNET_CRYPTO_ASYNC_OP_TYPE_##n,
  foreach_crypto_async_op_type
#undef _
    VNET_CRYPTO_ASYNC_OP_N_TYPES,
} vnet_crypto_async_op_type_t;

typedef enum
{
  VNET_CRYPTO_ASYNC_ALG_NONE = 0,
#define _(n, s, k, t, a) \
  VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a,
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d) \
  VNET_CRYPTO_ALG_##c##_##h##_TAG##d,
  foreach_crypto_link_async_alg
#undef _
  VNET_CRYPTO_N_ASYNC_ALGS,
} vnet_crypto_async_alg_t;

typedef enum
{
  VNET_CRYPTO_ASYNC_OP_NONE = 0,
#define _(n, s, k, t, a) \
  VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
  VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d) \
  VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
  VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,
  foreach_crypto_link_async_alg
#undef _
  VNET_CRYPTO_ASYNC_OP_N_IDS,
} vnet_crypto_async_op_id_t;

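/*
 * A key is either a DATA key (raw key material for a single alg) or a
 * LINK key (a cipher/integrity pair referencing two existing data keys,
 * built by vnet_crypto_key_add_linked() for the linked algs above).
 */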
typedef struct
{
  union
  {
    struct
    {
      u8 *data;
      vnet_crypto_alg_t alg:8;
    };
    struct
    {
      u32 index_crypto;
      u32 index_integ;
      vnet_crypto_async_alg_t async_alg:8;
    };
  };
#define VNET_CRYPTO_KEY_TYPE_DATA 0
#define VNET_CRYPTO_KEY_TYPE_LINK 1
  u8 type;
} vnet_crypto_key_t;

typedef enum
{
  VNET_CRYPTO_OP_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_OP_##n##_ENC, VNET_CRYPTO_OP_##n##_DEC,
  foreach_crypto_cipher_alg
  foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_OP_##n##_HMAC,
  foreach_crypto_hmac_alg
#undef _
    VNET_CRYPTO_N_OP_IDS,
} vnet_crypto_op_id_t;
/* *INDENT-ON* */

typedef enum
{
  CRYPTO_OP_SIMPLE,
  CRYPTO_OP_CHAINED,
  CRYPTO_OP_BOTH,
} crypto_op_class_type_t;

typedef struct
{
  char *name;
  vnet_crypto_op_id_t op_by_type[VNET_CRYPTO_OP_N_TYPES];
} vnet_crypto_alg_data_t;

typedef struct
{
  u8 *src;
  u8 *dst;
  u32 len;
} vnet_crypto_op_chunk_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  uword user_data;
  vnet_crypto_op_id_t op:16;
  vnet_crypto_op_status_t status:8;
  u8 flags;
#define VNET_CRYPTO_OP_FLAG_INIT_IV (1 << 0)
#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK (1 << 1)
#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS (1 << 2)

  union
  {
    u8 digest_len;
    u8 tag_len;
  };
  u16 aad_len;

  union
  {
    struct
    {
      u8 *src;
      u8 *dst;
    };

    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
    u16 n_chunks;
  };

  union
  {
    u32 len;
    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
    u32 chunk_index;
  };

  u32 key_index;
  u8 *iv;
  u8 *aad;

  union
  {
    u8 *tag;
    u8 *digest;
  };
} vnet_crypto_op_t;

STATIC_ASSERT_SIZEOF (vnet_crypto_op_t, CLIB_CACHE_LINE_BYTES);

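/*
 * Usage sketch (illustrative only; buffer, key and iv setup are the
 * caller's responsibility): encrypt one contiguous buffer with a
 * previously added key, then check the completion status.
 *
 *   vnet_crypto_op_t op;
 *   vnet_crypto_op_init (&op, VNET_CRYPTO_OP_AES_128_CBC_ENC);
 *   op.src = plaintext;          // hypothetical caller-owned buffers
 *   op.dst = ciphertext;
 *   op.len = len;
 *   op.iv = iv;
 *   op.key_index = key_index;    // from vnet_crypto_key_add()
 *   vnet_crypto_process_ops (vm, &op, 1);
 *   if (op.status != VNET_CRYPTO_OP_STATUS_COMPLETED)
 *     ; // handle the failure
 */
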
typedef struct
{
  vnet_crypto_op_type_t type;
  vnet_crypto_alg_t alg;
  u32 active_engine_index_simple;
  u32 active_engine_index_chained;
} vnet_crypto_op_data_t;

typedef struct
{
  vnet_crypto_async_op_type_t type;
  vnet_crypto_async_alg_t alg;
  u32 active_engine_index_async;
} vnet_crypto_async_op_data_t;

typedef struct
{
  char *name;
  vnet_crypto_async_op_id_t op_by_type[VNET_CRYPTO_ASYNC_OP_N_TYPES];
} vnet_crypto_async_alg_data_t;

typedef struct
{
  u8 *iv;
  union
  {
    u8 *digest;
    u8 *tag;
  };
  u8 *aad;
  u32 key_index;
  u32 crypto_total_length;
  i16 crypto_start_offset; /* first buffer offset */
  i16 integ_start_offset;
  /* adjustment to total_length for integ, e.g. 4 bytes for IPsec ESN */
  u16 integ_length_adj;
  vnet_crypto_op_status_t status : 8;
  u8 flags; /**< share same VNET_CRYPTO_OP_FLAG_* values */
} vnet_crypto_async_frame_elt_t;

/* Assert the size so the compiler will warn us when it changes */
STATIC_ASSERT_SIZEOF (vnet_crypto_async_frame_elt_t, 5 * sizeof (u64));

typedef enum vnet_crypto_async_frame_state_t_
{
  VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED,
  /* frame waiting to be processed */
  VNET_CRYPTO_FRAME_STATE_PENDING,
  VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS,
  VNET_CRYPTO_FRAME_STATE_SUCCESS,
  VNET_CRYPTO_FRAME_STATE_ELT_ERROR
} __clib_packed vnet_crypto_async_frame_state_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  vnet_crypto_async_frame_state_t state;
  vnet_crypto_async_op_id_t op:8;
  u16 n_elts;
  vnet_crypto_async_frame_elt_t elts[VNET_CRYPTO_FRAME_SIZE];
  u32 buffer_indices[VNET_CRYPTO_FRAME_SIZE];
  u16 next_node_index[VNET_CRYPTO_FRAME_SIZE];
  u32 enqueue_thread_index;
} vnet_crypto_async_frame_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  vnet_crypto_async_frame_t *frames[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_async_frame_t *frame_pool;
  u32 *buffer_indices;
  u16 *nexts;
} vnet_crypto_thread_t;

typedef u32 vnet_crypto_key_index_t;

typedef u32 (vnet_crypto_chained_ops_handler_t) (vlib_main_t * vm,
                                                 vnet_crypto_op_t * ops[],
                                                 vnet_crypto_op_chunk_t *
                                                 chunks, u32 n_ops);

typedef u32 (vnet_crypto_ops_handler_t) (vlib_main_t * vm,
                                         vnet_crypto_op_t * ops[], u32 n_ops);

typedef void (vnet_crypto_key_handler_t) (vlib_main_t * vm,
                                          vnet_crypto_key_op_t kop,
                                          vnet_crypto_key_index_t idx);

/** async crypto function handlers **/
typedef int
  (vnet_crypto_frame_enqueue_t) (vlib_main_t * vm,
                                 vnet_crypto_async_frame_t * frame);
typedef vnet_crypto_async_frame_t *
  (vnet_crypto_frame_dequeue_t) (vlib_main_t * vm, u32 * nb_elts_processed,
                                 u32 * enqueue_thread_idx);

u32
vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
                             char *desc);

void vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
                                       vnet_crypto_op_id_t opt,
                                       vnet_crypto_ops_handler_t * oph);

void vnet_crypto_register_chained_ops_handler (vlib_main_t * vm,
                                               u32 engine_index,
                                               vnet_crypto_op_id_t opt,
                                               vnet_crypto_chained_ops_handler_t
                                               * oph);

void vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
                                        vnet_crypto_op_id_t opt,
                                        vnet_crypto_ops_handler_t * fn,
                                        vnet_crypto_chained_ops_handler_t *
                                        cfn);

void vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
                                       vnet_crypto_key_handler_t * keyh);

/** async crypto register functions */
u32 vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name);
void vnet_crypto_register_async_handler (vlib_main_t * vm,
                                         u32 engine_index,
                                         vnet_crypto_async_op_id_t opt,
                                         vnet_crypto_frame_enqueue_t * enq_fn,
                                         vnet_crypto_frame_dequeue_t *
                                         deq_fn);

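/*
 * Registration sketch (illustrative; "my-engine" and the handler name
 * are hypothetical): an engine registers itself once, then hooks the
 * op ids it supports.
 *
 *   u32 ei = vnet_crypto_register_engine (vm, "my-engine", 100,
 *                                         "example engine");
 *   vnet_crypto_register_ops_handler (vm, ei,
 *                                     VNET_CRYPTO_OP_AES_128_CBC_ENC,
 *                                     my_aes_cbc_enc_fn);
 */
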
typedef struct
{
  char *name;
  char *desc;
  int priority;
  vnet_crypto_key_handler_t *key_op_handler;
  vnet_crypto_ops_handler_t *ops_handlers[VNET_CRYPTO_N_OP_IDS];
    vnet_crypto_chained_ops_handler_t
    * chained_ops_handlers[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_frame_enqueue_t *enqueue_handlers[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_frame_dequeue_t *dequeue_handlers[VNET_CRYPTO_ASYNC_OP_N_IDS];
} vnet_crypto_engine_t;

typedef struct
{
  u32 node_idx;
  u32 next_idx;
} vnet_crypto_async_next_node_t;

typedef struct
{
  vnet_crypto_alg_data_t *algs;
  vnet_crypto_thread_t *threads;
  vnet_crypto_ops_handler_t **ops_handlers;
  vnet_crypto_chained_ops_handler_t **chained_ops_handlers;
  vnet_crypto_frame_enqueue_t **enqueue_handlers;
  vnet_crypto_frame_dequeue_t **dequeue_handlers;
  clib_bitmap_t *async_active_ids;
  vnet_crypto_op_data_t opt_data[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_async_op_data_t async_opt_data[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_engine_t *engines;
  vnet_crypto_key_t *keys;
  uword *engine_index_by_name;
  uword *alg_index_by_name;
  uword *async_alg_index_by_name;
  vnet_crypto_async_alg_data_t *async_algs;
  u32 async_refcnt;
  vnet_crypto_async_next_node_t *next_nodes;
  u32 crypto_node_index;
#define VNET_CRYPTO_ASYNC_DISPATCH_POLLING 0
#define VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT 1
  u8 dispatch_mode;
} vnet_crypto_main_t;

extern vnet_crypto_main_t crypto_main;

u32 vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
                                     vnet_crypto_op_chunk_t * chunks,
                                     u32 n_ops);
u32 vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
                             u32 n_ops);

int vnet_crypto_set_handler2 (char *ops_handler_name, char *engine,
                              crypto_op_class_type_t oct);
int vnet_crypto_is_set_handler (vnet_crypto_alg_t alg);

u32 vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg,
                         u8 * data, u16 length);
void vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index);

/**
 * Use two previously created keys to generate a new key for linked algs
 * (cipher + integ). The returned key index is to be used with linked
 * algs only.
 **/
u32 vnet_crypto_key_add_linked (vlib_main_t * vm,
                                vnet_crypto_key_index_t index_crypto,
                                vnet_crypto_key_index_t index_integ);

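/*
 * Sketch (illustrative; key material and lengths come from the caller):
 * build a key usable with the aes-128-ctr-hmac-sha-1 linked alg.
 *
 *   u32 ki_crypto = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_AES_128_CTR,
 *                                        crypto_key_data, 16);
 *   u32 ki_integ = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_HMAC_SHA1,
 *                                       integ_key_data, 20);
 *   u32 ki_linked = vnet_crypto_key_add_linked (vm, ki_crypto, ki_integ);
 */
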
clib_error_t *crypto_dispatch_enable_disable (int is_enable);

int vnet_crypto_set_async_handler2 (char *alg_name, char *engine);

int vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t opt);

void vnet_crypto_request_async_mode (int is_enable);

void vnet_crypto_set_async_dispatch_mode (u8 mode);

vnet_crypto_async_alg_t vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
                                               vnet_crypto_alg_t integ_alg);

format_function_t format_vnet_crypto_alg;
format_function_t format_vnet_crypto_engine;
format_function_t format_vnet_crypto_op;
format_function_t format_vnet_crypto_op_type;
format_function_t format_vnet_crypto_op_status;
unformat_function_t unformat_vnet_crypto_alg;

format_function_t format_vnet_crypto_async_op;
format_function_t format_vnet_crypto_async_alg;
format_function_t format_vnet_crypto_async_op_type;

static_always_inline void
vnet_crypto_op_init (vnet_crypto_op_t * op, vnet_crypto_op_id_t type)
{
  if (CLIB_DEBUG > 0)
    clib_memset (op, 0xfe, sizeof (*op));
  op->op = type;
  op->flags = 0;
  op->key_index = ~0;
  op->n_chunks = 0;
}

static_always_inline vnet_crypto_op_type_t
vnet_crypto_get_op_type (vnet_crypto_op_id_t id)
{
  vnet_crypto_main_t *cm = &crypto_main;
  ASSERT (id < VNET_CRYPTO_N_OP_IDS);
  vnet_crypto_op_data_t *od = cm->opt_data + id;
  return od->type;
}

static_always_inline vnet_crypto_key_t *
vnet_crypto_get_key (vnet_crypto_key_index_t index)
{
  vnet_crypto_main_t *cm = &crypto_main;
  return vec_elt_at_index (cm->keys, index);
}

static_always_inline int
vnet_crypto_set_handler (char *alg_name, char *engine)
{
  return vnet_crypto_set_handler2 (alg_name, engine, CRYPTO_OP_BOTH);
}

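/*
 * Example (illustrative; engine availability depends on the build):
 * pin both simple and chained aes-128-cbc ops to one engine.
 *
 *   vnet_crypto_set_handler ("aes-128-cbc", "openssl");
 */
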
/** async crypto inline functions **/

static_always_inline vnet_crypto_async_frame_t *
vnet_crypto_async_get_frame (vlib_main_t * vm, vnet_crypto_async_op_id_t opt)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
  vnet_crypto_async_frame_t *f = ct->frames[opt];

  if (!f)
    {
      pool_get_aligned (ct->frame_pool, f, CLIB_CACHE_LINE_BYTES);
      if (CLIB_DEBUG > 0)
        clib_memset (f, 0xfe, sizeof (*f));
      f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
      f->op = opt;
      f->n_elts = 0;
      ct->frames[opt] = f;
    }
  return f;
}

static_always_inline void
vnet_crypto_async_free_frame (vlib_main_t * vm,
                              vnet_crypto_async_frame_t * frame)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
  pool_put (ct->frame_pool, frame);
}

static_always_inline int
vnet_crypto_async_submit_open_frame (vlib_main_t * vm,
                                     vnet_crypto_async_frame_t * frame)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
  vnet_crypto_async_op_id_t opt = frame->op;
  u32 i = vlib_num_workers () > 0;

  frame->state = VNET_CRYPTO_FRAME_STATE_PENDING;
  frame->enqueue_thread_index = vm->thread_index;

  int ret = (cm->enqueue_handlers[frame->op]) (vm, frame);

  clib_bitmap_set_no_check (cm->async_active_ids, opt, 1);
  if (PREDICT_TRUE (ret == 0))
    {
      vnet_crypto_async_frame_t *nf = 0;
      pool_get_aligned (ct->frame_pool, nf, CLIB_CACHE_LINE_BYTES);
      if (CLIB_DEBUG > 0)
        clib_memset (nf, 0xfe, sizeof (*nf));
      nf->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
      nf->op = opt;
      nf->n_elts = 0;
      ct->frames[opt] = nf;
    }
  else
    {
      frame->state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
    }

  if (cm->dispatch_mode == VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT)
    {
      for (; i < tm->n_vlib_mains; i++)
        {
          vlib_node_set_interrupt_pending (vlib_mains[i],
                                           cm->crypto_node_index);
        }
    }
  return ret;
}

static_always_inline int
vnet_crypto_async_add_to_frame (vlib_main_t * vm,
                                vnet_crypto_async_frame_t ** frame,
                                u32 key_index,
                                u32 crypto_len, i16 integ_len_adj,
                                i16 crypto_start_offset,
                                u16 integ_start_offset,
                                u32 buffer_index,
                                u16 next_node,
                                u8 * iv, u8 * tag, u8 * aad, u8 flags)
{
  vnet_crypto_async_frame_t *f = *frame;
  vnet_crypto_async_frame_elt_t *fe;
  u16 index;

  if (PREDICT_FALSE (f->n_elts == VNET_CRYPTO_FRAME_SIZE))
    {
      vnet_crypto_async_op_id_t opt = f->op;
      int ret;
      ret = vnet_crypto_async_submit_open_frame (vm, f);
      if (PREDICT_FALSE (ret < 0))
        return -1;
      f = vnet_crypto_async_get_frame (vm, opt);
      *frame = f;
    }

  index = f->n_elts;
  fe = &f->elts[index];
  f->n_elts++;
  fe->key_index = key_index;
  fe->crypto_total_length = crypto_len;
  fe->crypto_start_offset = crypto_start_offset;
  fe->integ_start_offset = integ_start_offset;
  fe->integ_length_adj = integ_len_adj;
  fe->iv = iv;
  fe->tag = tag;
  fe->aad = aad;
  fe->flags = flags;
  f->buffer_indices[index] = buffer_index;
  f->next_node_index[index] = next_node;

  return 0;
}

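/*
 * Async flow sketch (illustrative; offsets, pointers and the op id come
 * from the calling feature, e.g. IPsec ESP):
 *
 *   vnet_crypto_async_frame_t *f =
 *     vnet_crypto_async_get_frame (vm,
 *                                  VNET_CRYPTO_OP_AES_128_CTR_SHA1_TAG12_ENC);
 *   // for each buffer in the batch:
 *   vnet_crypto_async_add_to_frame (vm, &f, key_index, crypto_len,
 *                                   integ_len_adj, crypto_start_offset,
 *                                   integ_start_offset, buffer_index,
 *                                   next_node, iv, tag, aad, flags);
 *   // once the batch is built:
 *   vnet_crypto_async_submit_open_frame (vm, f);
 *
 * Completed frames are later picked up through the engine's dequeue
 * handler and their buffers are enqueued to next_node.
 */
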
static_always_inline void
vnet_crypto_async_reset_frame (vnet_crypto_async_frame_t * f)
{
  vnet_crypto_async_op_id_t opt;
  ASSERT (f != 0);
  ASSERT ((f->state == VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED
           || f->state == VNET_CRYPTO_FRAME_STATE_ELT_ERROR));
  opt = f->op;
  if (CLIB_DEBUG > 0)
    clib_memset (f, 0xfe, sizeof (*f));
  f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
  f->op = opt;
  f->n_elts = 0;
}

#endif /* included_vnet_crypto_crypto_h */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */