wireguard: add async mode for encryption packets
[vpp.git] src/vnet/crypto/crypto.h
/*
 * Copyright (c) 2019 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef included_vnet_crypto_crypto_h
#define included_vnet_crypto_crypto_h

#include <vlib/vlib.h>

#define VNET_CRYPTO_FRAME_SIZE 64
#define VNET_CRYPTO_FRAME_POOL_SIZE 1024

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_cipher_alg \
  _(DES_CBC,     "des-cbc", 7) \
  _(3DES_CBC,    "3des-cbc", 24) \
  _(AES_128_CBC, "aes-128-cbc", 16) \
  _(AES_192_CBC, "aes-192-cbc", 24) \
  _(AES_256_CBC, "aes-256-cbc", 32) \
  _(AES_128_CTR, "aes-128-ctr", 16) \
  _(AES_192_CTR, "aes-192-ctr", 24) \
  _(AES_256_CTR, "aes-256-ctr", 32)

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_aead_alg \
  _(AES_128_GCM, "aes-128-gcm", 16) \
  _(AES_192_GCM, "aes-192-gcm", 24) \
  _(AES_256_GCM, "aes-256-gcm", 32) \
  _(CHACHA20_POLY1305, "chacha20-poly1305", 32)

#define foreach_crypto_hash_alg                                               \
  _ (SHA1, "sha-1")                                                           \
  _ (SHA224, "sha-224")                                                       \
  _ (SHA256, "sha-256")                                                       \
  _ (SHA384, "sha-384")                                                       \
  _ (SHA512, "sha-512")

#define foreach_crypto_hmac_alg \
  _(MD5, "md5") \
  _(SHA1, "sha-1") \
  _(SHA224, "sha-224")  \
  _(SHA256, "sha-256")  \
  _(SHA384, "sha-384")  \
  _(SHA512, "sha-512")

#define foreach_crypto_op_type                                                \
  _ (ENCRYPT, "encrypt")                                                      \
  _ (DECRYPT, "decrypt")                                                      \
  _ (AEAD_ENCRYPT, "aead-encrypt")                                            \
  _ (AEAD_DECRYPT, "aead-decrypt")                                            \
  _ (HMAC, "hmac")                                                            \
  _ (HASH, "hash")

typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_TYPE_##n,
  foreach_crypto_op_type
#undef _
    VNET_CRYPTO_OP_N_TYPES,
} vnet_crypto_op_type_t;

#define foreach_crypto_op_status \
  _(IDLE, "idle") \
  _(PENDING, "pending") \
  _(WORK_IN_PROGRESS, "work-in-progress") \
  _(COMPLETED, "completed") \
  _(FAIL_NO_HANDLER, "no-handler") \
  _(FAIL_BAD_HMAC, "bad-hmac") \
  _(FAIL_ENGINE_ERR, "engine-error")

/** async crypto **/

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, TAG_LEN, AAD_LEN */
#define foreach_crypto_aead_async_alg                                         \
  _ (AES_128_GCM, "aes-128-gcm-aad8", 16, 16, 8)                              \
  _ (AES_128_GCM, "aes-128-gcm-aad12", 16, 16, 12)                            \
  _ (AES_192_GCM, "aes-192-gcm-aad8", 24, 16, 8)                              \
  _ (AES_192_GCM, "aes-192-gcm-aad12", 24, 16, 12)                            \
  _ (AES_256_GCM, "aes-256-gcm-aad8", 32, 16, 8)                              \
  _ (AES_256_GCM, "aes-256-gcm-aad12", 32, 16, 12)                            \
  _ (CHACHA20_POLY1305, "chacha20-poly1305-aad8", 32, 16, 8)                  \
  _ (CHACHA20_POLY1305, "chacha20-poly1305-aad12", 32, 16, 12)                \
  _ (CHACHA20_POLY1305, "chacha20-poly1305", 32, 16, 0)

/* CRYPTO_ID, INTEG_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, DIGEST_LEN */
#define foreach_crypto_link_async_alg                                         \
  _ (3DES_CBC, MD5, "3des-cbc-hmac-md5", 24, 12)                              \
  _ (AES_128_CBC, MD5, "aes-128-cbc-hmac-md5", 16, 12)                        \
  _ (AES_192_CBC, MD5, "aes-192-cbc-hmac-md5", 24, 12)                        \
  _ (AES_256_CBC, MD5, "aes-256-cbc-hmac-md5", 32, 12)                        \
  _ (3DES_CBC, SHA1, "3des-cbc-hmac-sha-1", 24, 12)                           \
  _ (AES_128_CBC, SHA1, "aes-128-cbc-hmac-sha-1", 16, 12)                     \
  _ (AES_192_CBC, SHA1, "aes-192-cbc-hmac-sha-1", 24, 12)                     \
  _ (AES_256_CBC, SHA1, "aes-256-cbc-hmac-sha-1", 32, 12)                     \
  _ (3DES_CBC, SHA224, "3des-cbc-hmac-sha-224", 24, 14)                       \
  _ (AES_128_CBC, SHA224, "aes-128-cbc-hmac-sha-224", 16, 14)                 \
  _ (AES_192_CBC, SHA224, "aes-192-cbc-hmac-sha-224", 24, 14)                 \
  _ (AES_256_CBC, SHA224, "aes-256-cbc-hmac-sha-224", 32, 14)                 \
  _ (3DES_CBC, SHA256, "3des-cbc-hmac-sha-256", 24, 16)                       \
  _ (AES_128_CBC, SHA256, "aes-128-cbc-hmac-sha-256", 16, 16)                 \
  _ (AES_192_CBC, SHA256, "aes-192-cbc-hmac-sha-256", 24, 16)                 \
  _ (AES_256_CBC, SHA256, "aes-256-cbc-hmac-sha-256", 32, 16)                 \
  _ (3DES_CBC, SHA384, "3des-cbc-hmac-sha-384", 24, 24)                       \
  _ (AES_128_CBC, SHA384, "aes-128-cbc-hmac-sha-384", 16, 24)                 \
  _ (AES_192_CBC, SHA384, "aes-192-cbc-hmac-sha-384", 24, 24)                 \
  _ (AES_256_CBC, SHA384, "aes-256-cbc-hmac-sha-384", 32, 24)                 \
  _ (3DES_CBC, SHA512, "3des-cbc-hmac-sha-512", 24, 32)                       \
  _ (AES_128_CBC, SHA512, "aes-128-cbc-hmac-sha-512", 16, 32)                 \
  _ (AES_192_CBC, SHA512, "aes-192-cbc-hmac-sha-512", 24, 32)                 \
  _ (AES_256_CBC, SHA512, "aes-256-cbc-hmac-sha-512", 32, 32)                 \
  _ (AES_128_CTR, SHA1, "aes-128-ctr-hmac-sha-1", 16, 12)                     \
  _ (AES_192_CTR, SHA1, "aes-192-ctr-hmac-sha-1", 24, 12)                     \
  _ (AES_256_CTR, SHA1, "aes-256-ctr-hmac-sha-1", 32, 12)

#define foreach_crypto_async_op_type \
  _(ENCRYPT, "async-encrypt") \
  _(DECRYPT, "async-decrypt")

typedef enum
{
  VNET_CRYPTO_KEY_OP_ADD,
  VNET_CRYPTO_KEY_OP_DEL,
  VNET_CRYPTO_KEY_OP_MODIFY,
} vnet_crypto_key_op_t;

typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_STATUS_##n,
  foreach_crypto_op_status
#undef _
    VNET_CRYPTO_OP_N_STATUS,
} vnet_crypto_op_status_t;

/* *INDENT-OFF* */
typedef enum
{
  VNET_CRYPTO_ALG_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_ALG_##n,
  foreach_crypto_cipher_alg foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_ALG_HMAC_##n,
    foreach_crypto_hmac_alg
#undef _
#define _(n, s) VNET_CRYPTO_ALG_HASH_##n,
      foreach_crypto_hash_alg
#undef _
        VNET_CRYPTO_N_ALGS,
} vnet_crypto_alg_t;

typedef enum
{
#define _(n, s) VNET_CRYPTO_ASYNC_OP_TYPE_##n,
  foreach_crypto_async_op_type
#undef _
    VNET_CRYPTO_ASYNC_OP_N_TYPES,
} vnet_crypto_async_op_type_t;

typedef enum
{
  VNET_CRYPTO_ASYNC_ALG_NONE = 0,
#define _(n, s, k, t, a) \
  VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a,
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d) \
  VNET_CRYPTO_ALG_##c##_##h##_TAG##d,
  foreach_crypto_link_async_alg
#undef _
  VNET_CRYPTO_N_ASYNC_ALGS,
} vnet_crypto_async_alg_t;

typedef enum
{
  VNET_CRYPTO_ASYNC_OP_NONE = 0,
#define _(n, s, k, t, a) \
  VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
  VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d) \
  VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
  VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,
  foreach_crypto_link_async_alg
#undef _
  VNET_CRYPTO_ASYNC_OP_N_IDS,
} vnet_crypto_async_op_id_t;
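
/*
 * A worked expansion of the macros above: the AEAD entry
 * _ (AES_128_GCM, "aes-128-gcm-aad8", 16, 16, 8) produces the algorithm id
 * VNET_CRYPTO_ALG_AES_128_GCM_TAG16_AAD8 and the op id pair
 * VNET_CRYPTO_OP_AES_128_GCM_TAG16_AAD8_ENC /
 * VNET_CRYPTO_OP_AES_128_GCM_TAG16_AAD8_DEC.
 */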

typedef struct
{
  union
  {
    struct
    {
      u8 *data;
      vnet_crypto_alg_t alg:8;
    };
    struct
    {
      u32 index_crypto;
      u32 index_integ;
      vnet_crypto_async_alg_t async_alg:8;
    };
  };
#define VNET_CRYPTO_KEY_TYPE_DATA 0
#define VNET_CRYPTO_KEY_TYPE_LINK 1
  u8 type;
} vnet_crypto_key_t;
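
/*
 * A key is either raw key material (VNET_CRYPTO_KEY_TYPE_DATA, first union
 * member) or a link between two previously added keys
 * (VNET_CRYPTO_KEY_TYPE_LINK, second union member), as created by
 * vnet_crypto_key_add_linked() declared below.
 */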

typedef enum
{
  VNET_CRYPTO_OP_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_OP_##n##_ENC, VNET_CRYPTO_OP_##n##_DEC,
  foreach_crypto_cipher_alg foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_OP_##n##_HMAC,
    foreach_crypto_hmac_alg
#undef _
#define _(n, s) VNET_CRYPTO_OP_##n##_HASH,
      foreach_crypto_hash_alg
#undef _
        VNET_CRYPTO_N_OP_IDS,
} vnet_crypto_op_id_t;
/* *INDENT-ON* */
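
/*
 * Likewise for the synchronous ids: _(AES_128_CBC, "aes-128-cbc", 16)
 * expands to VNET_CRYPTO_ALG_AES_128_CBC plus the op ids
 * VNET_CRYPTO_OP_AES_128_CBC_ENC and VNET_CRYPTO_OP_AES_128_CBC_DEC.
 */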

typedef enum
{
  CRYPTO_OP_SIMPLE,
  CRYPTO_OP_CHAINED,
  CRYPTO_OP_BOTH,
} crypto_op_class_type_t;

typedef struct
{
  char *name;
  vnet_crypto_op_id_t op_by_type[VNET_CRYPTO_OP_N_TYPES];
} vnet_crypto_alg_data_t;

typedef struct
{
  u8 *src;
  u8 *dst;
  u32 len;
} vnet_crypto_op_chunk_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  uword user_data;
  vnet_crypto_op_id_t op:16;
  vnet_crypto_op_status_t status:8;
  u8 flags;
#define VNET_CRYPTO_OP_FLAG_INIT_IV (1 << 0)
#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK (1 << 1)
#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS (1 << 2)

  union
  {
    u8 digest_len;
    u8 tag_len;
  };
  u16 aad_len;

  union
  {
    struct
    {
      u8 *src;
      u8 *dst;
    };

    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
    u16 n_chunks;
  };

  union
  {
    u32 len;
    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
    u32 chunk_index;
  };

  u32 key_index;
  u8 *iv;
  u8 *aad;

  union
  {
    u8 *tag;
    u8 *digest;
  };
} vnet_crypto_op_t;

STATIC_ASSERT_SIZEOF (vnet_crypto_op_t, CLIB_CACHE_LINE_BYTES);
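
/*
 * Minimal usage sketch for the synchronous path (buffer pointers, lengths
 * and the key index are illustrative; vnet_crypto_op_init() and
 * vnet_crypto_process_ops() are declared later in this file):
 *
 *   vnet_crypto_op_t op;
 *   vnet_crypto_op_init (&op, VNET_CRYPTO_OP_AES_128_GCM_ENC);
 *   op.key_index = key_index;
 *   op.iv = iv;
 *   op.aad = aad;
 *   op.aad_len = aad_len;
 *   op.src = op.dst = payload;
 *   op.len = payload_len;
 *   op.tag = tag;
 *   op.tag_len = 16;
 *   vnet_crypto_process_ops (vm, &op, 1);
 *   if (op.status != VNET_CRYPTO_OP_STATUS_COMPLETED)
 *     ...handle failure...
 */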

typedef struct
{
  vnet_crypto_op_type_t type;
  vnet_crypto_alg_t alg;
  u32 active_engine_index_simple;
  u32 active_engine_index_chained;
} vnet_crypto_op_data_t;

typedef struct
{
  vnet_crypto_async_op_type_t type;
  vnet_crypto_async_alg_t alg;
  u32 active_engine_index_async;
} vnet_crypto_async_op_data_t;

typedef struct
{
  char *name;
  vnet_crypto_async_op_id_t op_by_type[VNET_CRYPTO_ASYNC_OP_N_TYPES];
} vnet_crypto_async_alg_data_t;

typedef struct
{
  u8 *iv;
  union
  {
    u8 *digest;
    u8 *tag;
  };
  u8 *aad;
  u32 key_index;
  u32 crypto_total_length;
  i16 crypto_start_offset; /* first buffer offset */
  i16 integ_start_offset;
  /* adjustment to total_length for integ, e.g. 4 bytes for IPsec ESN */
  u16 integ_length_adj;
  vnet_crypto_op_status_t status : 8;
  u8 flags; /**< share same VNET_CRYPTO_OP_FLAG_* values */
} vnet_crypto_async_frame_elt_t;

/* Assert the size so the compiler will warn us when it changes */
STATIC_ASSERT_SIZEOF (vnet_crypto_async_frame_elt_t, 5 * sizeof (u64));

typedef enum vnet_crypto_async_frame_state_t_
{
  VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED,
  /* frame waiting to be processed */
  VNET_CRYPTO_FRAME_STATE_PENDING,
  VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS,
  VNET_CRYPTO_FRAME_STATE_SUCCESS,
  VNET_CRYPTO_FRAME_STATE_ELT_ERROR
} __clib_packed vnet_crypto_async_frame_state_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  vnet_crypto_async_frame_state_t state;
  vnet_crypto_async_op_id_t op:8;
  u16 n_elts;
  vnet_crypto_async_frame_elt_t elts[VNET_CRYPTO_FRAME_SIZE];
  u32 buffer_indices[VNET_CRYPTO_FRAME_SIZE];
  u16 next_node_index[VNET_CRYPTO_FRAME_SIZE];
  u32 enqueue_thread_index;
} vnet_crypto_async_frame_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  vnet_crypto_async_frame_t *frame_pool;
  u32 *buffer_indices;
  u16 *nexts;
} vnet_crypto_thread_t;

typedef u32 vnet_crypto_key_index_t;

typedef u32 (vnet_crypto_chained_ops_handler_t) (vlib_main_t * vm,
                                                 vnet_crypto_op_t * ops[],
                                                 vnet_crypto_op_chunk_t *
                                                 chunks, u32 n_ops);

typedef u32 (vnet_crypto_ops_handler_t) (vlib_main_t * vm,
                                         vnet_crypto_op_t * ops[], u32 n_ops);

typedef void (vnet_crypto_key_handler_t) (vlib_main_t * vm,
                                          vnet_crypto_key_op_t kop,
                                          vnet_crypto_key_index_t idx);

/** async crypto function handlers **/
typedef int
  (vnet_crypto_frame_enqueue_t) (vlib_main_t * vm,
                                 vnet_crypto_async_frame_t * frame);
typedef vnet_crypto_async_frame_t *
  (vnet_crypto_frame_dequeue_t) (vlib_main_t * vm, u32 * nb_elts_processed,
                                 u32 * enqueue_thread_idx);

u32
vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
                             char *desc);

void vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
                                       vnet_crypto_op_id_t opt,
                                       vnet_crypto_ops_handler_t * oph);

void vnet_crypto_register_chained_ops_handler (vlib_main_t * vm,
                                               u32 engine_index,
                                               vnet_crypto_op_id_t opt,
                                               vnet_crypto_chained_ops_handler_t
                                               * oph);

void vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
                                        vnet_crypto_op_id_t opt,
                                        vnet_crypto_ops_handler_t * fn,
                                        vnet_crypto_chained_ops_handler_t *
                                        cfn);

void vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
                                       vnet_crypto_key_handler_t * keyh);

/** async crypto register functions */
u32 vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name);

void
vnet_crypto_register_enqueue_handler (vlib_main_t *vm, u32 engine_index,
                                      vnet_crypto_async_op_id_t opt,
                                      vnet_crypto_frame_enqueue_t *enq_fn);

void
vnet_crypto_register_dequeue_handler (vlib_main_t *vm, u32 engine_index,
                                      vnet_crypto_frame_dequeue_t *deq_fn);
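
/*
 * Engine registration sketch (the engine name and the demo_* handler
 * functions are illustrative): an async engine registers one enqueue
 * handler per supported op id and a single dequeue handler shared by all
 * of them.
 *
 *   u32 eidx = vnet_crypto_register_engine (vm, "demo", 100, "demo engine");
 *   vnet_crypto_register_enqueue_handler (
 *     vm, eidx, VNET_CRYPTO_OP_AES_128_GCM_TAG16_AAD8_ENC, demo_enq_enc);
 *   vnet_crypto_register_enqueue_handler (
 *     vm, eidx, VNET_CRYPTO_OP_AES_128_GCM_TAG16_AAD8_DEC, demo_enq_dec);
 *   vnet_crypto_register_dequeue_handler (vm, eidx, demo_deq);
 */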

typedef struct
{
  char *name;
  char *desc;
  int priority;
  vnet_crypto_key_handler_t *key_op_handler;
  vnet_crypto_ops_handler_t *ops_handlers[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_chained_ops_handler_t
    *chained_ops_handlers[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_frame_enqueue_t *enqueue_handlers[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_frame_dequeue_t *dequeue_handler;
} vnet_crypto_engine_t;

typedef struct
{
  u32 node_idx;
  u32 next_idx;
} vnet_crypto_async_next_node_t;

typedef struct
{
  vnet_crypto_alg_data_t *algs;
  vnet_crypto_thread_t *threads;
  vnet_crypto_ops_handler_t **ops_handlers;
  vnet_crypto_chained_ops_handler_t **chained_ops_handlers;
  vnet_crypto_frame_enqueue_t **enqueue_handlers;
  vnet_crypto_frame_dequeue_t **dequeue_handlers;
  vnet_crypto_op_data_t opt_data[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_async_op_data_t async_opt_data[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_engine_t *engines;
  vnet_crypto_key_t *keys;
  uword *engine_index_by_name;
  uword *alg_index_by_name;
  uword *async_alg_index_by_name;
  vnet_crypto_async_alg_data_t *async_algs;
  u32 async_refcnt;
  vnet_crypto_async_next_node_t *next_nodes;
  u32 crypto_node_index;
#define VNET_CRYPTO_ASYNC_DISPATCH_POLLING 0
#define VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT 1
  u8 dispatch_mode;
} vnet_crypto_main_t;

extern vnet_crypto_main_t crypto_main;

u32 vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
                                     vnet_crypto_op_chunk_t * chunks,
                                     u32 n_ops);
u32 vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
                             u32 n_ops);

int vnet_crypto_set_handler2 (char *ops_handler_name, char *engine,
                              crypto_op_class_type_t oct);
int vnet_crypto_is_set_handler (vnet_crypto_alg_t alg);

u32 vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg,
                         u8 * data, u16 length);
void vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index);

/**
 * Use two previously created keys to generate a new key for linked
 * algorithms (cipher + integ). The returned key index is to be used for
 * linked algorithms only.
 **/
u32 vnet_crypto_key_add_linked (vlib_main_t * vm,
                                vnet_crypto_key_index_t index_crypto,
                                vnet_crypto_key_index_t index_integ);
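
/*
 * Sketch (key material and lengths are illustrative):
 *
 *   u32 ci = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_AES_128_CBC, ck, 16);
 *   u32 ii = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_HMAC_SHA1, ik, 20);
 *   u32 li = vnet_crypto_key_add_linked (vm, ci, ii);
 *
 * li then selects the VNET_CRYPTO_ALG_AES_128_CBC_SHA1_TAG12 linked
 * algorithm (see vnet_crypto_link_algs() below).
 */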

clib_error_t *crypto_dispatch_enable_disable (int is_enable);

int vnet_crypto_set_async_handler2 (char *alg_name, char *engine);

int vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t opt);

void vnet_crypto_request_async_mode (int is_enable);

void vnet_crypto_set_async_dispatch_mode (u8 mode);
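
/*
 * e.g. vnet_crypto_set_async_dispatch_mode
 *        (VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT);
 * with the modes defined in vnet_crypto_main_t above.
 */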

vnet_crypto_async_alg_t vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
                                               vnet_crypto_alg_t integ_alg);

format_function_t format_vnet_crypto_alg;
format_function_t format_vnet_crypto_engine;
format_function_t format_vnet_crypto_op;
format_function_t format_vnet_crypto_op_type;
format_function_t format_vnet_crypto_op_status;
unformat_function_t unformat_vnet_crypto_alg;

format_function_t format_vnet_crypto_async_op;
format_function_t format_vnet_crypto_async_alg;
format_function_t format_vnet_crypto_async_op_type;

static_always_inline void
vnet_crypto_op_init (vnet_crypto_op_t * op, vnet_crypto_op_id_t type)
{
  if (CLIB_DEBUG > 0)
    clib_memset (op, 0xfe, sizeof (*op));
  op->op = type;
  op->flags = 0;
  op->key_index = ~0;
  op->n_chunks = 0;
}

static_always_inline vnet_crypto_op_type_t
vnet_crypto_get_op_type (vnet_crypto_op_id_t id)
{
  vnet_crypto_main_t *cm = &crypto_main;
  ASSERT (id < VNET_CRYPTO_N_OP_IDS);
  vnet_crypto_op_data_t *od = cm->opt_data + id;
  return od->type;
}

static_always_inline vnet_crypto_key_t *
vnet_crypto_get_key (vnet_crypto_key_index_t index)
{
  vnet_crypto_main_t *cm = &crypto_main;
  return vec_elt_at_index (cm->keys, index);
}

static_always_inline int
vnet_crypto_set_handler (char *alg_name, char *engine)
{
  return vnet_crypto_set_handler2 (alg_name, engine, CRYPTO_OP_BOTH);
}
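
/*
 * Convenience wrapper that pins both the simple and chained op handlers
 * of an algorithm to one engine, e.g. (engine name illustrative):
 *
 *   vnet_crypto_set_handler ("aes-128-gcm", "openssl");
 */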

/** async crypto inline functions **/

static_always_inline vnet_crypto_async_frame_t *
vnet_crypto_async_get_frame (vlib_main_t * vm, vnet_crypto_async_op_id_t opt)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
  vnet_crypto_async_frame_t *f = NULL;

  pool_get_aligned (ct->frame_pool, f, CLIB_CACHE_LINE_BYTES);
  if (CLIB_DEBUG > 0)
    clib_memset (f, 0xfe, sizeof (*f));
  f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
  f->op = opt;
  f->n_elts = 0;

  return f;
}

static_always_inline void
vnet_crypto_async_free_frame (vlib_main_t * vm,
                              vnet_crypto_async_frame_t * frame)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
  pool_put (ct->frame_pool, frame);
}

static_always_inline int
vnet_crypto_async_submit_open_frame (vlib_main_t * vm,
                                     vnet_crypto_async_frame_t * frame)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  /* with workers present, interrupts go to workers only (start at 1);
     otherwise the main thread dispatches itself (start at 0) */
  u32 i = vlib_num_workers () > 0;

  frame->state = VNET_CRYPTO_FRAME_STATE_PENDING;
  frame->enqueue_thread_index = vm->thread_index;

  int ret = (cm->enqueue_handlers[frame->op]) (vm, frame);

  if (PREDICT_TRUE (ret == 0))
    {
      if (cm->dispatch_mode == VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT)
        {
          for (; i < tm->n_vlib_mains; i++)
            vlib_node_set_interrupt_pending (vlib_get_main_by_index (i),
                                             cm->crypto_node_index);
        }
    }
  else
    {
      frame->state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
    }

  return ret;
}

static_always_inline void
vnet_crypto_async_add_to_frame (vlib_main_t *vm, vnet_crypto_async_frame_t *f,
                                u32 key_index, u32 crypto_len,
                                i16 integ_len_adj, i16 crypto_start_offset,
                                u16 integ_start_offset, u32 buffer_index,
                                u16 next_node, u8 *iv, u8 *tag, u8 *aad,
                                u8 flags)
{
  vnet_crypto_async_frame_elt_t *fe;
  u16 index;

  ASSERT (f->n_elts < VNET_CRYPTO_FRAME_SIZE);

  index = f->n_elts;
  fe = &f->elts[index];
  f->n_elts++;
  fe->key_index = key_index;
  fe->crypto_total_length = crypto_len;
  fe->crypto_start_offset = crypto_start_offset;
  fe->integ_start_offset = integ_start_offset;
  fe->integ_length_adj = integ_len_adj;
  fe->iv = iv;
  fe->tag = tag;
  fe->aad = aad;
  fe->flags = flags;
  f->buffer_indices[index] = buffer_index;
  f->next_node_index[index] = next_node;
}
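
/*
 * Async frame lifecycle sketch (buffer index, offsets, pointers and the
 * failure handling policy are illustrative):
 *
 *   vnet_crypto_async_frame_t *f =
 *     vnet_crypto_async_get_frame (vm,
 *                                  VNET_CRYPTO_OP_AES_128_GCM_TAG16_AAD8_ENC);
 *   // one element per buffer, up to VNET_CRYPTO_FRAME_SIZE
 *   vnet_crypto_async_add_to_frame (vm, f, key_index, crypto_len,
 *                                   integ_len_adj, crypto_start_offset,
 *                                   integ_start_offset, buffer_index,
 *                                   next_node, iv, tag, aad, flags);
 *   if (vnet_crypto_async_submit_open_frame (vm, f) != 0)
 *     vnet_crypto_async_free_frame (vm, f);
 *
 * Completed frames are returned by the engine's dequeue handler and their
 * buffers dispatched to each element's next_node_index.
 */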

static_always_inline void
vnet_crypto_async_reset_frame (vnet_crypto_async_frame_t * f)
{
  vnet_crypto_async_op_id_t opt;
  ASSERT (f != 0);
  ASSERT ((f->state == VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED
           || f->state == VNET_CRYPTO_FRAME_STATE_ELT_ERROR));
  opt = f->op;
  if (CLIB_DEBUG > 0)
    clib_memset (f, 0xfe, sizeof (*f));
  f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
  f->op = opt;
  f->n_elts = 0;
}

static_always_inline u8
vnet_crypto_async_frame_is_full (const vnet_crypto_async_frame_t *f)
{
  return (f->n_elts == VNET_CRYPTO_FRAME_SIZE);
}

#endif /* included_vnet_crypto_crypto_h */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */