crypto: introduce async crypto infra
[vpp.git] / src / vnet / crypto / crypto.h
1 /*
2  * Copyright (c) 2019 Cisco and/or its affiliates.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at:
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15
16 #ifndef included_vnet_crypto_crypto_h
17 #define included_vnet_crypto_crypto_h
18
19 #include <vlib/vlib.h>
20
/* Maximum number of operations carried in one async crypto frame. */
#define VNET_CRYPTO_FRAME_SIZE 32

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_cipher_alg \
  _(DES_CBC,     "des-cbc", 7) \
  _(3DES_CBC,    "3des-cbc", 24) \
  _(AES_128_CBC, "aes-128-cbc", 16) \
  _(AES_192_CBC, "aes-192-cbc", 24) \
  _(AES_256_CBC, "aes-256-cbc", 32) \
  _(AES_128_CTR, "aes-128-ctr", 16) \
  _(AES_192_CTR, "aes-192-ctr", 24) \
  _(AES_256_CTR, "aes-256-ctr", 32)

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_aead_alg \
  _(AES_128_GCM, "aes-128-gcm", 16) \
  _(AES_192_GCM, "aes-192-gcm", 24) \
  _(AES_256_GCM, "aes-256-gcm", 32)

/* CRYPTO_ID, PRETTY_NAME */
#define foreach_crypto_hmac_alg \
  _(MD5, "md5") \
  _(SHA1, "sha-1") \
  _(SHA224, "sha-224")  \
  _(SHA256, "sha-256")  \
  _(SHA384, "sha-384")  \
  _(SHA512, "sha-512")

/* OP_TYPE_ID, PRETTY_NAME - categories of synchronous crypto operations */
#define foreach_crypto_op_type \
  _(ENCRYPT, "encrypt") \
  _(DECRYPT, "decrypt") \
  _(AEAD_ENCRYPT, "aead-encrypt") \
  _(AEAD_DECRYPT, "aead-decrypt") \
  _(HMAC, "hmac")
54
/* Operation category, expanded from foreach_crypto_op_type. */
typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_TYPE_##n,
  foreach_crypto_op_type
#undef _
    VNET_CRYPTO_OP_N_TYPES,
} vnet_crypto_op_type_t;
62
/* STATUS_ID, PRETTY_NAME - lifecycle and result states of a crypto op */
#define foreach_crypto_op_status \
  _(IDLE, "idle") \
  _(PENDING, "pending") \
  _(WORK_IN_PROGRESS, "work-in-progress") \
  _(COMPLETED, "completed") \
  _(FAIL_NO_HANDLER, "no-handler") \
  _(FAIL_BAD_HMAC, "bad-hmac") \
  _(FAIL_ENGINE_ERR, "engine-error")
71
/** async crypto **/

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, TAG_LEN, AAD_LEN
 * Async AEAD algs are specialized per tag/AAD length combination. */
#define foreach_crypto_aead_async_alg \
  _(AES_128_GCM, "aes-128-gcm-aad8", 16, 16, 8) \
  _(AES_128_GCM, "aes-128-gcm-aad12", 16, 16, 12) \
  _(AES_192_GCM, "aes-192-gcm-aad8", 24, 16, 8) \
  _(AES_192_GCM, "aes-192-gcm-aad12", 24, 16, 12) \
  _(AES_256_GCM, "aes-256-gcm-aad8", 32, 16, 8) \
  _(AES_256_GCM, "aes-256-gcm-aad12", 32, 16, 12)

/* CRYPTO_ID, INTEG_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, DIGEST_LEN
 * "Linked" algs pair a cipher with an HMAC integrity alg. */
#define foreach_crypto_link_async_alg \
  _ (AES_128_CBC, SHA1, "aes-128-cbc-hmac-sha-1", 16, 12) \
  _ (AES_192_CBC, SHA1, "aes-192-cbc-hmac-sha-1", 24, 12) \
  _ (AES_256_CBC, SHA1, "aes-256-cbc-hmac-sha-1", 32, 12) \
  _ (AES_128_CBC, SHA224, "aes-128-cbc-hmac-sha-224", 16, 14) \
  _ (AES_192_CBC, SHA224, "aes-192-cbc-hmac-sha-224", 24, 14) \
  _ (AES_256_CBC, SHA224, "aes-256-cbc-hmac-sha-224", 32, 14) \
  _ (AES_128_CBC, SHA256, "aes-128-cbc-hmac-sha-256", 16, 16) \
  _ (AES_192_CBC, SHA256, "aes-192-cbc-hmac-sha-256", 24, 16) \
  _ (AES_256_CBC, SHA256, "aes-256-cbc-hmac-sha-256", 32, 16) \
  _ (AES_128_CBC, SHA384, "aes-128-cbc-hmac-sha-384", 16, 24) \
  _ (AES_192_CBC, SHA384, "aes-192-cbc-hmac-sha-384", 24, 24) \
  _ (AES_256_CBC, SHA384, "aes-256-cbc-hmac-sha-384", 32, 24) \
  _ (AES_128_CBC, SHA512, "aes-128-cbc-hmac-sha-512", 16, 32) \
  _ (AES_192_CBC, SHA512, "aes-192-cbc-hmac-sha-512", 24, 32) \
  _ (AES_256_CBC, SHA512, "aes-256-cbc-hmac-sha-512", 32, 32)

/* OP_TYPE_ID, PRETTY_NAME - async ops are only encrypt or decrypt */
#define foreach_crypto_async_op_type \
  _(ENCRYPT, "async-encrypt") \
  _(DECRYPT, "async-decrypt")
104
/* Key lifecycle events passed to engine key handlers. */
typedef enum
{
  VNET_CRYPTO_KEY_OP_ADD,
  VNET_CRYPTO_KEY_OP_DEL,
  VNET_CRYPTO_KEY_OP_MODIFY,
} vnet_crypto_key_op_t;
111
/* Per-op status, expanded from foreach_crypto_op_status. */
typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_STATUS_##n,
  foreach_crypto_op_status
#undef _
    VNET_CRYPTO_OP_N_STATUS,
} vnet_crypto_op_status_t;
119
/* *INDENT-OFF* */
/* Synchronous algorithm ids: ciphers and AEADs first, then HMACs
   (HMAC ids are prefixed VNET_CRYPTO_ALG_HMAC_). */
typedef enum
{
  VNET_CRYPTO_ALG_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_ALG_##n,
  foreach_crypto_cipher_alg
  foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_ALG_HMAC_##n,
  foreach_crypto_hmac_alg
#undef _
  VNET_CRYPTO_N_ALGS,
} vnet_crypto_alg_t;
133
/* Async operation category, expanded from foreach_crypto_async_op_type. */
typedef enum
{
#define _(n, s) VNET_CRYPTO_ASYNC_OP_TYPE_##n,
  foreach_crypto_async_op_type
#undef _
    VNET_CRYPTO_ASYNC_OP_N_TYPES,
} vnet_crypto_async_op_type_t;
141
/* Async algorithm ids: AEAD variants (per tag/AAD length) followed by
   linked cipher+HMAC variants (per digest length). */
typedef enum
{
  VNET_CRYPTO_ASYNC_ALG_NONE = 0,
#define _(n, s, k, t, a) \
  VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a,
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k ,d) \
  VNET_CRYPTO_ALG_##c##_##h##_TAG##d,
  foreach_crypto_link_async_alg
#undef _
  VNET_CRYPTO_N_ASYNC_ALGS,
} vnet_crypto_async_alg_t;
155
/* Async operation ids: one _ENC/_DEC pair per async algorithm. */
typedef enum
{
  VNET_CRYPTO_ASYNC_OP_NONE = 0,
#define _(n, s, k, t, a) \
  VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
  VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k ,d) \
  VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
  VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,
  foreach_crypto_link_async_alg
#undef _
  VNET_CRYPTO_ASYNC_OP_N_IDS,
} vnet_crypto_async_op_id_t;
171
/* A crypto key. Two variants selected by 'type':
   - DATA: owns raw key material ('data' vector) for a sync alg;
   - LINK: references two existing keys (cipher + integrity) by index
     for a linked async alg. */
typedef struct
{
  union
  {
    struct
    {
      u8 *data;                 /* raw key material (DATA keys) */
      vnet_crypto_alg_t alg:8;
    };
    struct
    {
      u32 index_crypto;         /* key index of the cipher key */
      u32 index_integ;          /* key index of the integrity key */
      vnet_crypto_async_alg_t async_alg:8;
    };
  };
#define VNET_CRYPTO_KEY_TYPE_DATA 0
#define VNET_CRYPTO_KEY_TYPE_LINK 1
  u8 type;                      /* VNET_CRYPTO_KEY_TYPE_* */
} vnet_crypto_key_t;
192
/* Synchronous operation ids: an _ENC/_DEC pair per cipher/AEAD alg and
   an _HMAC id per HMAC alg. */
typedef enum
{
  VNET_CRYPTO_OP_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_OP_##n##_ENC, VNET_CRYPTO_OP_##n##_DEC,
  foreach_crypto_cipher_alg
  foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_OP_##n##_HMAC,
 foreach_crypto_hmac_alg
#undef _
    VNET_CRYPTO_N_OP_IDS,
} vnet_crypto_op_id_t;
/* *INDENT-ON* */
206
207
/* Which handler class a vnet_crypto_set_handler2() call targets:
   simple (single-buffer), chained (multi-chunk), or both. */
typedef enum
{
  CRYPTO_OP_SIMPLE,
  CRYPTO_OP_CHAINED,
  CRYPTO_OP_BOTH,
} crypto_op_class_type_t;
214
/* Per-algorithm data: name and the op id for each op type. */
typedef struct
{
  char *name;
  vnet_crypto_op_id_t op_by_type[VNET_CRYPTO_OP_N_TYPES];
} vnet_crypto_alg_data_t;

/* One chunk of a chained (multi-buffer) operation. */
typedef struct
{
  u8 *src;
  u8 *dst;
  u32 len;
} vnet_crypto_op_chunk_t;
227
/* A single synchronous crypto operation, sized to one cache line
   (enforced by the STATIC_ASSERT_SIZEOF below). */
typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  uword user_data;              /* opaque, returned to the caller */
  vnet_crypto_op_id_t op:16;
  vnet_crypto_op_status_t status:8;
  u8 flags;
#define VNET_CRYPTO_OP_FLAG_INIT_IV (1 << 0)
#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK (1 << 1)
#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS (1 << 2)

  /* digest_len for HMAC ops, tag_len for AEAD ops */
  union
  {
    u8 digest_len;
    u8 tag_len;
  };
  u16 aad_len;

  union
  {
    struct
    {
      u8 *src;
      u8 *dst;
    };

    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
    u16 n_chunks;
  };

  union
  {
    u32 len;
    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
    u32 chunk_index;
  };

  u32 key_index;
  u8 *iv;
  u8 *aad;

  union
  {
    u8 *tag;                    /* AEAD */
    u8 *digest;                 /* HMAC */
  };
} vnet_crypto_op_t;

STATIC_ASSERT_SIZEOF (vnet_crypto_op_t, CLIB_CACHE_LINE_BYTES);
277
/* Per-op-id data: its type/alg and which engine currently handles it
   for simple and chained variants. */
typedef struct
{
  vnet_crypto_op_type_t type;
  vnet_crypto_alg_t alg;
  u32 active_engine_index_simple;
  u32 active_engine_index_chained;
} vnet_crypto_op_data_t;

/* Per-async-op-id data: type/alg and the active async engine. */
typedef struct
{
  vnet_crypto_async_op_type_t type;
  vnet_crypto_async_alg_t alg;
  u32 active_engine_index_async;
} vnet_crypto_async_op_data_t;

/* Per-async-algorithm data: name and op id per async op type. */
typedef struct
{
  char *name;
  vnet_crypto_async_op_id_t op_by_type[VNET_CRYPTO_ASYNC_OP_N_TYPES];
} vnet_crypto_async_alg_data_t;
298
/* One element of an async frame: describes the crypto/integrity work
   for a single buffer. Offsets are relative to the buffer data. */
typedef struct
{
  vnet_crypto_op_status_t status:8;
  u32 key_index;
  i16 crypto_start_offset;      /* first buffer offset */
  i16 integ_start_offset;
  u32 crypto_total_length;
  /* adj total_length for integ, e.g.4 bytes for IPSec ESN */
  u16 integ_length_adj;
  u8 *iv;
  union
  {
    u8 *digest;
    u8 *tag;
  };
  u8 *aad;
  u8 flags; /**< share same VNET_CRYPTO_OP_FLAG_* values */
} vnet_crypto_async_frame_elt_t;
317
/* A batch of up to VNET_CRYPTO_FRAME_SIZE async elements, submitted to
   an engine as one unit. buffer_indices/next_node_index are parallel
   arrays to elts, used to hand buffers back after completion. */
typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
#define VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED 0
#define VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS 1
#define VNET_CRYPTO_FRAME_STATE_SUCCESS 2
#define VNET_CRYPTO_FRAME_STATE_ELT_ERROR 3
  u8 state;                     /* VNET_CRYPTO_FRAME_STATE_* */
  vnet_crypto_async_op_id_t op:8;
  u16 n_elts;                   /* number of valid entries in elts[] */
  vnet_crypto_async_frame_elt_t elts[VNET_CRYPTO_FRAME_SIZE];
  u32 buffer_indices[VNET_CRYPTO_FRAME_SIZE];
  u16 next_node_index[VNET_CRYPTO_FRAME_SIZE];
} vnet_crypto_async_frame_t;
332
/* Per-worker-thread state: one open frame per async op id plus a pool
   the frames are allocated from. */
typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  vnet_crypto_async_frame_t *frames[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_async_frame_t *frame_pool;
  /* NOTE(review): 'buffer_indice' looks like a typo for
     'buffer_indices', but renaming the field would break users. */
  u32 *buffer_indice;
  u16 *nexts;
} vnet_crypto_thread_t;
341
typedef u32 vnet_crypto_key_index_t;

/* Engine entry point for chained (multi-chunk) ops; returns the number
   of ops processed. */
typedef u32 (vnet_crypto_chained_ops_handler_t) (vlib_main_t * vm,
						 vnet_crypto_op_t * ops[],
						 vnet_crypto_op_chunk_t *
						 chunks, u32 n_ops);

/* Engine entry point for simple ops; returns the number processed. */
typedef u32 (vnet_crypto_ops_handler_t) (vlib_main_t * vm,
					 vnet_crypto_op_t * ops[], u32 n_ops);

/* Engine callback invoked on key add/del/modify. */
typedef void (vnet_crypto_key_handler_t) (vlib_main_t * vm,
					  vnet_crypto_key_op_t kop,
					  vnet_crypto_key_index_t idx);

/** async crypto function handlers **/
typedef int (vnet_crypto_frame_enqueue_t) (vlib_main_t * vm,
					   vnet_crypto_async_frame_t * frame);
typedef vnet_crypto_async_frame_t *
  (vnet_crypto_frame_dequeue_t) (vlib_main_t * vm);
361
/* Register a crypto engine; returns its engine index. Higher 'prio'
   engines are preferred when selecting active handlers. */
u32 vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
				 char *desc);

/* Register a simple-ops handler for one op id. */
void vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
				       vnet_crypto_op_id_t opt,
				       vnet_crypto_ops_handler_t * oph);

/* Register a chained-ops handler for one op id. */
void vnet_crypto_register_chained_ops_handler (vlib_main_t * vm,
					       u32 engine_index,
					       vnet_crypto_op_id_t opt,
					       vnet_crypto_chained_ops_handler_t
					       * oph);

/* Register simple and chained handlers for one op id in one call. */
void vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
					vnet_crypto_op_id_t opt,
					vnet_crypto_ops_handler_t * fn,
					vnet_crypto_chained_ops_handler_t *
					cfn);

/* Register an engine's key add/del/modify callback. */
void vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
				       vnet_crypto_key_handler_t * keyh);

/** async crypto register functions */
u32 vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name);
void vnet_crypto_register_async_handler (vlib_main_t * vm,
					 u32 engine_index,
					 vnet_crypto_async_op_id_t opt,
					 vnet_crypto_frame_enqueue_t * enq_fn,
					 vnet_crypto_frame_dequeue_t *
					 deq_fn);
392
/* A registered crypto engine and its handler tables (indexed by op id
   for sync, by async op id for async). */
typedef struct
{
  char *name;
  char *desc;
  int priority;
  vnet_crypto_key_handler_t *key_op_handler;
  vnet_crypto_ops_handler_t *ops_handlers[VNET_CRYPTO_N_OP_IDS];
    vnet_crypto_chained_ops_handler_t
    * chained_ops_handlers[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_frame_enqueue_t *enqueue_handlers[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_frame_dequeue_t *dequeue_handlers[VNET_CRYPTO_ASYNC_OP_N_IDS];
} vnet_crypto_engine_t;

/* Graph node + next index pair used to dispatch async completions. */
typedef struct
{
  u32 node_idx;
  u32 next_idx;
} vnet_crypto_async_next_node_t;
411
/* Global crypto infra state (single instance: crypto_main). */
typedef struct
{
  vnet_crypto_alg_data_t *algs;         /* vector, indexed by alg id */
  vnet_crypto_thread_t *threads;        /* vector, per worker thread */
  /* active handler per op id, resolved from the registered engines */
  vnet_crypto_ops_handler_t **ops_handlers;
  vnet_crypto_chained_ops_handler_t **chained_ops_handlers;
  vnet_crypto_frame_enqueue_t **enqueue_handlers;
  vnet_crypto_frame_dequeue_t **dequeue_handlers;
  /* bitmap of async op ids that have had frames submitted */
  clib_bitmap_t *async_active_ids;
  vnet_crypto_op_data_t opt_data[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_async_op_data_t async_opt_data[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_engine_t *engines;        /* vector of registered engines */
  vnet_crypto_key_t *keys;              /* vector, indexed by key index */
  uword *engine_index_by_name;          /* hash: name -> engine index */
  uword *alg_index_by_name;             /* hash: name -> alg id */
  uword *async_alg_index_by_name;       /* hash: name -> async alg id */
  vnet_crypto_async_alg_data_t *async_algs;
  u32 async_refcnt;                     /* users requesting async mode */
  vnet_crypto_async_next_node_t *next_nodes;
} vnet_crypto_main_t;
432
433 extern vnet_crypto_main_t crypto_main;
434
435 u32 vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
436                                      vnet_crypto_op_chunk_t * chunks,
437                                      u32 n_ops);
438 u32 vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
439                              u32 n_ops);
440
441
442 int vnet_crypto_set_handler2 (char *ops_handler_name, char *engine,
443                               crypto_op_class_type_t oct);
444 int vnet_crypto_is_set_handler (vnet_crypto_alg_t alg);
445
446 u32 vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg,
447                          u8 * data, u16 length);
448 void vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index);
449
450 /**
451  * Use 2 created keys to generate new key for linked algs (cipher + integ)
452  * The returned key index is to be used for linked alg only.
453  **/
454 u32 vnet_crypto_key_add_linked (vlib_main_t * vm,
455                                 vnet_crypto_key_index_t index_crypto,
456                                 vnet_crypto_key_index_t index_integ);
457
458 clib_error_t *crypto_dispatch_enable_disable (int is_enable);
459
460 int vnet_crypto_set_async_handler2 (char *alg_name, char *engine);
461
462 int vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t opt);
463
464 void vnet_crypto_request_async_mode (int is_enable);
465
466 vnet_crypto_async_alg_t vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
467                                                vnet_crypto_alg_t integ_alg);
468
469 clib_error_t *crypto_dispatch_enable_disable (int is_enable);
470
/* format/unformat helpers for CLI and debug output */
format_function_t format_vnet_crypto_alg;
format_function_t format_vnet_crypto_engine;
format_function_t format_vnet_crypto_op;
format_function_t format_vnet_crypto_op_type;
format_function_t format_vnet_crypto_op_status;
unformat_function_t unformat_vnet_crypto_alg;

format_function_t format_vnet_crypto_async_op;
format_function_t format_vnet_crypto_async_alg;
format_function_t format_vnet_crypto_async_op_type;
481
482 static_always_inline void
483 vnet_crypto_op_init (vnet_crypto_op_t * op, vnet_crypto_op_id_t type)
484 {
485   if (CLIB_DEBUG > 0)
486     clib_memset (op, 0xfe, sizeof (*op));
487   op->op = type;
488   op->flags = 0;
489   op->key_index = ~0;
490   op->n_chunks = 0;
491 }
492
493 static_always_inline vnet_crypto_op_type_t
494 vnet_crypto_get_op_type (vnet_crypto_op_id_t id)
495 {
496   vnet_crypto_main_t *cm = &crypto_main;
497   ASSERT (id < VNET_CRYPTO_N_OP_IDS);
498   vnet_crypto_op_data_t *od = cm->opt_data + id;
499   return od->type;
500 }
501
502 static_always_inline vnet_crypto_key_t *
503 vnet_crypto_get_key (vnet_crypto_key_index_t index)
504 {
505   vnet_crypto_main_t *cm = &crypto_main;
506   return vec_elt_at_index (cm->keys, index);
507 }
508
/* Convenience wrapper: set both simple and chained handlers. */
static_always_inline int
vnet_crypto_set_handler (char *alg_name, char *engine)
{
  return vnet_crypto_set_handler2 (alg_name, engine, CRYPTO_OP_BOTH);
}
514
515 /** async crypto inline functions **/
516
517 static_always_inline vnet_crypto_async_frame_t *
518 vnet_crypto_async_get_frame (vlib_main_t * vm, vnet_crypto_async_op_id_t opt)
519 {
520   vnet_crypto_main_t *cm = &crypto_main;
521   vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
522   vnet_crypto_async_frame_t *f = ct->frames[opt];
523
524   if (!f)
525     {
526       pool_get_aligned (ct->frame_pool, f, CLIB_CACHE_LINE_BYTES);
527       if (CLIB_DEBUG > 0)
528         clib_memset (f, 0xfe, sizeof (*f));
529       f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
530       f->op = opt;
531       f->n_elts = 0;
532       ct->frames[opt] = f;
533     }
534   return f;
535 }
536
537 static_always_inline void
538 vnet_crypto_async_free_frame (vlib_main_t * vm,
539                               vnet_crypto_async_frame_t * frame)
540 {
541   vnet_crypto_main_t *cm = &crypto_main;
542   vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
543   pool_put (ct->frame_pool, frame);
544 }
545
546 static_always_inline int
547 vnet_crypto_async_submit_open_frame (vlib_main_t * vm,
548                                      vnet_crypto_async_frame_t * frame)
549 {
550   vnet_crypto_main_t *cm = &crypto_main;
551   vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
552   vnet_crypto_async_op_id_t opt = frame->op;
553   int ret = (cm->enqueue_handlers[frame->op]) (vm, frame);
554   clib_bitmap_set_no_check (cm->async_active_ids, opt, 1);
555   if (PREDICT_TRUE (ret == 0))
556     {
557       vnet_crypto_async_frame_t *nf = 0;
558       frame->state = VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS;
559       pool_get_aligned (ct->frame_pool, nf, CLIB_CACHE_LINE_BYTES);
560       if (CLIB_DEBUG > 0)
561         clib_memset (nf, 0xfe, sizeof (*nf));
562       nf->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
563       nf->op = opt;
564       nf->n_elts = 0;
565       ct->frames[opt] = nf;
566     }
567   return ret;
568 }
569
/**
 * Add one element (one buffer's worth of work) to an open async frame.
 * If the frame is already full it is submitted first and *frame is
 * updated to point at a freshly opened frame.
 *
 * Returns 0 on success, -1 if a full frame failed to submit.
 *
 * NOTE(review): integ_start_offset is declared u16 here but is stored
 * into the i16 integ_start_offset field of the frame element (and
 * crypto_start_offset is i16) - looks like it should be i16; confirm
 * against callers before relying on negative offsets.
 */
static_always_inline int
vnet_crypto_async_add_to_frame (vlib_main_t * vm,
				vnet_crypto_async_frame_t ** frame,
				u32 key_index,
				u32 crypto_len, i16 integ_len_adj,
				i16 crypto_start_offset,
				u16 integ_start_offset,
				u32 buffer_index,
				u16 next_node,
				u8 * iv, u8 * tag, u8 * aad, u8 flags)
{
  vnet_crypto_async_frame_t *f = *frame;
  vnet_crypto_async_frame_elt_t *fe;
  u16 index;

  /* frame full: submit it and open a new one for the same op */
  if (PREDICT_FALSE (f->n_elts == VNET_CRYPTO_FRAME_SIZE))
    {
      vnet_crypto_async_op_id_t opt = f->op;
      int ret;
      ret = vnet_crypto_async_submit_open_frame (vm, f);
      if (PREDICT_FALSE (ret < 0))
	return -1;
      f = vnet_crypto_async_get_frame (vm, opt);
      *frame = f;
    }

  index = f->n_elts;
  fe = &f->elts[index];
  f->n_elts++;
  fe->key_index = key_index;
  fe->crypto_total_length = crypto_len;
  fe->crypto_start_offset = crypto_start_offset;
  fe->integ_start_offset = integ_start_offset;
  fe->integ_length_adj = integ_len_adj;
  fe->iv = iv;
  fe->tag = tag;
  fe->aad = aad;
  fe->flags = flags;
  f->buffer_indices[index] = buffer_index;
  f->next_node_index[index] = next_node;

  return 0;
}
613
614 static_always_inline void
615 vnet_crypto_async_reset_frame (vnet_crypto_async_frame_t * f)
616 {
617   vnet_crypto_async_op_id_t opt;
618   ASSERT (f != 0);
619   ASSERT (f->state == VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED);
620   opt = f->op;
621   if (CLIB_DEBUG > 0)
622     clib_memset (f, 0xfe, sizeof (*f));
623   f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
624   f->op = opt;
625   f->n_elts = 0;
626 }
627
628 #endif /* included_vnet_crypto_crypto_h */
629
630 /*
631  * fd.io coding-style-patch-verification: ON
632  *
633  * Local Variables:
634  * eval: (c-set-style "gnu")
635  * End:
636  */