/* src/vnet/crypto/crypto.h (vpp.git) */
1 /*
2  * Copyright (c) 2019 Cisco and/or its affiliates.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at:
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15
16 #ifndef included_vnet_crypto_crypto_h
17 #define included_vnet_crypto_crypto_h
18
19 #include <vlib/vlib.h>
20
/* Number of elements carried by one async crypto frame */
#define VNET_CRYPTO_FRAME_SIZE 64
/* Per-thread frame pool size (see vnet_crypto_thread_t.frame_pool);
   presumably the fixed upper bound of in-flight frames per thread —
   TODO confirm against the pool initialization site */
#define VNET_CRYPTO_FRAME_POOL_SIZE 1024
23
/* X-macro tables of supported algorithms.  A consumer defines
   _(...) with the matching arity, expands the table, then #undef's _.
   Entry format: CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_cipher_alg \
  _(DES_CBC,     "des-cbc", 7) \
  _(3DES_CBC,    "3des-cbc", 24) \
  _(AES_128_CBC, "aes-128-cbc", 16) \
  _(AES_192_CBC, "aes-192-cbc", 24) \
  _(AES_256_CBC, "aes-256-cbc", 32) \
  _(AES_128_CTR, "aes-128-ctr", 16) \
  _(AES_192_CTR, "aes-192-ctr", 24) \
  _(AES_256_CTR, "aes-256-ctr", 32)

/* AEAD algorithms (combined encryption + authentication).
   Entry format: CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_aead_alg                                               \
  _ (AES_128_GCM, "aes-128-gcm", 16)                                          \
  _ (AES_192_GCM, "aes-192-gcm", 24)                                          \
  _ (AES_256_GCM, "aes-256-gcm", 32)                                          \
  _ (AES_128_NULL_GMAC, "aes-128-null-gmac", 16)                              \
  _ (AES_192_NULL_GMAC, "aes-192-null-gmac", 24)                              \
  _ (AES_256_NULL_GMAC, "aes-256-null-gmac", 32)                              \
  _ (CHACHA20_POLY1305, "chacha20-poly1305", 32)

/* Plain (unkeyed) hash algorithms: CRYPTO_ID, PRETTY_NAME */
#define foreach_crypto_hash_alg                                               \
  _ (SHA1, "sha-1")                                                           \
  _ (SHA224, "sha-224")                                                       \
  _ (SHA256, "sha-256")                                                       \
  _ (SHA384, "sha-384")                                                       \
  _ (SHA512, "sha-512")

/* HMAC algorithms: CRYPTO_ID, PRETTY_NAME */
#define foreach_crypto_hmac_alg \
  _(MD5, "md5") \
  _(SHA1, "sha-1") \
  _(SHA224, "sha-224")  \
  _(SHA256, "sha-256")  \
  _(SHA384, "sha-384")  \
  _(SHA512, "sha-512")
59
/* Categories of synchronous crypto operations: TYPE_ID, PRETTY_NAME */
#define foreach_crypto_op_type                                                \
  _ (ENCRYPT, "encrypt")                                                      \
  _ (DECRYPT, "decrypt")                                                      \
  _ (AEAD_ENCRYPT, "aead-encrypt")                                            \
  _ (AEAD_DECRYPT, "aead-decrypt")                                            \
  _ (HMAC, "hmac")                                                            \
  _ (HASH, "hash")

/* VNET_CRYPTO_OP_TYPE_<name> for every entry above, plus a count */
typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_TYPE_##n,
  foreach_crypto_op_type
#undef _
    VNET_CRYPTO_OP_N_TYPES,
} vnet_crypto_op_type_t;

/* Per-op completion/error states: STATUS_ID, PRETTY_NAME */
#define foreach_crypto_op_status \
  _(IDLE, "idle") \
  _(PENDING, "pending") \
  _(WORK_IN_PROGRESS, "work-in-progress") \
  _(COMPLETED, "completed") \
  _(FAIL_NO_HANDLER, "no-handler") \
  _(FAIL_BAD_HMAC, "bad-hmac") \
  _(FAIL_ENGINE_ERR, "engine-error")
84
/** async crypto **/

/* Async AEAD variants: one entry per (algorithm, AAD length) pair.
   Entry format: CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, TAG_LEN, AAD_LEN */
#define foreach_crypto_aead_async_alg                                         \
  _ (AES_128_GCM, "aes-128-gcm-aad8", 16, 16, 8)                              \
  _ (AES_128_GCM, "aes-128-gcm-aad12", 16, 16, 12)                            \
  _ (AES_192_GCM, "aes-192-gcm-aad8", 24, 16, 8)                              \
  _ (AES_192_GCM, "aes-192-gcm-aad12", 24, 16, 12)                            \
  _ (AES_256_GCM, "aes-256-gcm-aad8", 32, 16, 8)                              \
  _ (AES_256_GCM, "aes-256-gcm-aad12", 32, 16, 12)                            \
  _ (AES_128_NULL_GMAC, "aes-128-null-gmac-aad8", 16, 16, 8)                  \
  _ (AES_128_NULL_GMAC, "aes-128-null-gmac-aad12", 16, 16, 12)                \
  _ (AES_192_NULL_GMAC, "aes-192-null-gmac-aad8", 24, 16, 8)                  \
  _ (AES_192_NULL_GMAC, "aes-192-null-gmac-aad12", 24, 16, 12)                \
  _ (AES_256_NULL_GMAC, "aes-256-null-gmac-aad8", 32, 16, 8)                  \
  _ (AES_256_NULL_GMAC, "aes-256-null-gmac-aad12", 32, 16, 12)                \
  _ (CHACHA20_POLY1305, "chacha20-poly1305-aad8", 32, 16, 8)                  \
  _ (CHACHA20_POLY1305, "chacha20-poly1305-aad12", 32, 16, 12)                \
  _ (CHACHA20_POLY1305, "chacha20-poly1305", 32, 16, 0)

/* Async "linked" variants: a cipher paired with an HMAC integrity alg.
   Entry format: CRYPTO_ID, INTEG_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, DIGEST_LEN */
#define foreach_crypto_link_async_alg                                         \
  _ (3DES_CBC, MD5, "3des-cbc-hmac-md5", 24, 12)                              \
  _ (AES_128_CBC, MD5, "aes-128-cbc-hmac-md5", 16, 12)                        \
  _ (AES_192_CBC, MD5, "aes-192-cbc-hmac-md5", 24, 12)                        \
  _ (AES_256_CBC, MD5, "aes-256-cbc-hmac-md5", 32, 12)                        \
  _ (3DES_CBC, SHA1, "3des-cbc-hmac-sha-1", 24, 12)                           \
  _ (AES_128_CBC, SHA1, "aes-128-cbc-hmac-sha-1", 16, 12)                     \
  _ (AES_192_CBC, SHA1, "aes-192-cbc-hmac-sha-1", 24, 12)                     \
  _ (AES_256_CBC, SHA1, "aes-256-cbc-hmac-sha-1", 32, 12)                     \
  _ (3DES_CBC, SHA224, "3des-cbc-hmac-sha-224", 24, 14)                       \
  _ (AES_128_CBC, SHA224, "aes-128-cbc-hmac-sha-224", 16, 14)                 \
  _ (AES_192_CBC, SHA224, "aes-192-cbc-hmac-sha-224", 24, 14)                 \
  _ (AES_256_CBC, SHA224, "aes-256-cbc-hmac-sha-224", 32, 14)                 \
  _ (3DES_CBC, SHA256, "3des-cbc-hmac-sha-256", 24, 16)                       \
  _ (AES_128_CBC, SHA256, "aes-128-cbc-hmac-sha-256", 16, 16)                 \
  _ (AES_192_CBC, SHA256, "aes-192-cbc-hmac-sha-256", 24, 16)                 \
  _ (AES_256_CBC, SHA256, "aes-256-cbc-hmac-sha-256", 32, 16)                 \
  _ (3DES_CBC, SHA384, "3des-cbc-hmac-sha-384", 24, 24)                       \
  _ (AES_128_CBC, SHA384, "aes-128-cbc-hmac-sha-384", 16, 24)                 \
  _ (AES_192_CBC, SHA384, "aes-192-cbc-hmac-sha-384", 24, 24)                 \
  _ (AES_256_CBC, SHA384, "aes-256-cbc-hmac-sha-384", 32, 24)                 \
  _ (3DES_CBC, SHA512, "3des-cbc-hmac-sha-512", 24, 32)                       \
  _ (AES_128_CBC, SHA512, "aes-128-cbc-hmac-sha-512", 16, 32)                 \
  _ (AES_192_CBC, SHA512, "aes-192-cbc-hmac-sha-512", 24, 32)                 \
  _ (AES_256_CBC, SHA512, "aes-256-cbc-hmac-sha-512", 32, 32)                 \
  _ (AES_128_CTR, SHA1, "aes-128-ctr-hmac-sha-1", 16, 12)                     \
  _ (AES_192_CTR, SHA1, "aes-192-ctr-hmac-sha-1", 24, 12)                     \
  _ (AES_256_CTR, SHA1, "aes-256-ctr-hmac-sha-1", 32, 12)

/* Async ops have only two directions; integ/AEAD is implied by the alg */
#define foreach_crypto_async_op_type \
  _(ENCRYPT, "async-encrypt") \
  _(DECRYPT, "async-decrypt")
138
/* Key lifecycle events passed to engine key handlers */
typedef enum
{
  VNET_CRYPTO_KEY_OP_ADD,
  VNET_CRYPTO_KEY_OP_DEL,
  VNET_CRYPTO_KEY_OP_MODIFY,
} vnet_crypto_key_op_t;

/* VNET_CRYPTO_OP_STATUS_<name> for every status above, plus a count */
typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_STATUS_##n,
  foreach_crypto_op_status
#undef _
    VNET_CRYPTO_OP_N_STATUS,
} vnet_crypto_op_status_t;

/* Flat algorithm id space: ciphers and AEADs first, then HMAC and
   plain-hash variants.  0 is reserved for "none". */
typedef enum
{
  VNET_CRYPTO_ALG_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_ALG_##n,
  foreach_crypto_cipher_alg foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_ALG_HMAC_##n,
    foreach_crypto_hmac_alg
#undef _
#define _(n, s) VNET_CRYPTO_ALG_HASH_##n,
      foreach_crypto_hash_alg
#undef _
        VNET_CRYPTO_N_ALGS,
} vnet_crypto_alg_t;

/* VNET_CRYPTO_ASYNC_OP_TYPE_<name> (encrypt/decrypt), plus a count */
typedef enum
{
#define _(n, s) VNET_CRYPTO_ASYNC_OP_TYPE_##n,
  foreach_crypto_async_op_type
#undef _
    VNET_CRYPTO_ASYNC_OP_N_TYPES,
} vnet_crypto_async_op_type_t;

/* Async algorithm ids: AEAD variants encode tag and AAD lengths in the
   name, linked variants encode cipher + integ + digest length */
typedef enum
{
  VNET_CRYPTO_ASYNC_ALG_NONE = 0,
#define _(n, s, k, t, a) \
  VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a,
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k ,d) \
  VNET_CRYPTO_ALG_##c##_##h##_TAG##d,
  foreach_crypto_link_async_alg
#undef _
  VNET_CRYPTO_N_ASYNC_ALGS,
} vnet_crypto_async_alg_t;

/* Async op ids: an _ENC/_DEC pair per async algorithm above */
typedef enum
{
  VNET_CRYPTO_ASYNC_OP_NONE = 0,
#define _(n, s, k, t, a) \
  VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
  VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k ,d) \
  VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
  VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,
  foreach_crypto_link_async_alg
#undef _
  VNET_CRYPTO_ASYNC_OP_N_IDS,
} vnet_crypto_async_op_id_t;
206
/* A key slot in vnet_crypto_main_t.keys.  Two flavors, discriminated
   by 'type': DATA holds raw key material, LINK references two other
   DATA keys (crypto + integ) for linked async algorithms. */
typedef struct
{
  union
  {
    struct
    {
      /* VNET_CRYPTO_KEY_TYPE_DATA: raw key bytes (vec) and the alg */
      u8 *data;
      vnet_crypto_alg_t alg:8;
    };
    struct
    {
      /* VNET_CRYPTO_KEY_TYPE_LINK: indices of the two member keys */
      u32 index_crypto;
      u32 index_integ;
      vnet_crypto_async_alg_t async_alg:8;
    };
  };
#define VNET_CRYPTO_KEY_TYPE_DATA 0
#define VNET_CRYPTO_KEY_TYPE_LINK 1
  u8 type;
} vnet_crypto_key_t;
227
/* Concrete op ids: _ENC/_DEC pair per cipher/AEAD alg, one _HMAC per
   HMAC alg, one _HASH per hash alg.  0 is reserved for "none". */
typedef enum
{
  VNET_CRYPTO_OP_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_OP_##n##_ENC, VNET_CRYPTO_OP_##n##_DEC,
  foreach_crypto_cipher_alg foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_OP_##n##_HMAC,
    foreach_crypto_hmac_alg
#undef _
#define _(n, s) VNET_CRYPTO_OP_##n##_HASH,
      foreach_crypto_hash_alg
#undef _
        VNET_CRYPTO_N_OP_IDS,
} vnet_crypto_op_id_t;

/* Which handler flavor a setting applies to (see vnet_crypto_set_handler2) */
typedef enum
{
  CRYPTO_OP_SIMPLE,
  CRYPTO_OP_CHAINED,
  CRYPTO_OP_BOTH,
} crypto_op_class_type_t;

/* Per-algorithm metadata: name and the op id for each op type */
typedef struct
{
  char *name;
  vnet_crypto_op_id_t op_by_type[VNET_CRYPTO_OP_N_TYPES];
} vnet_crypto_alg_data_t;

/* One segment of a chained (multi-buffer) operation */
typedef struct
{
  u8 *src;
  u8 *dst;
  u32 len;
} vnet_crypto_op_chunk_t;
262
/* Descriptor for one synchronous crypto operation.  Deliberately packed
   into a single cache line (asserted below); initialize via
   vnet_crypto_op_init() before use. */
typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  uword user_data;
  vnet_crypto_op_id_t op:16;
  vnet_crypto_op_status_t status:8;
  u8 flags;
#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK      (1 << 0)
#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS (1 << 1)

  /* digest_len for HMAC/hash ops, tag_len for AEAD ops */
  union
  {
    u8 digest_len;
    u8 tag_len;
  };
  u16 aad_len;

  union
  {
    /* simple (single-buffer) op */
    struct
    {
      u8 *src;
      u8 *dst;
    };

    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
    u16 n_chunks;
  };

  union
  {
    u32 len;
    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
    u32 chunk_index;
  };

  u32 key_index;
  u8 *iv;
  u8 *aad;

  /* tag for AEAD, digest for HMAC/hash */
  union
  {
    u8 *tag;
    u8 *digest;
  };
} vnet_crypto_op_t;

/* Keep the op exactly one cache line; the compiler errors if it grows */
STATIC_ASSERT_SIZEOF (vnet_crypto_op_t, CLIB_CACHE_LINE_BYTES);
311
/* Per-op-id dispatch data: which engine currently handles this op,
   tracked separately for simple and chained variants */
typedef struct
{
  vnet_crypto_op_type_t type;
  vnet_crypto_alg_t alg;
  u32 active_engine_index_simple;
  u32 active_engine_index_chained;
} vnet_crypto_op_data_t;

/* Async counterpart of vnet_crypto_op_data_t */
typedef struct
{
  vnet_crypto_async_op_type_t type;
  vnet_crypto_async_alg_t alg;
  u32 active_engine_index_async;
} vnet_crypto_async_op_data_t;

/* Per-async-algorithm metadata: name and op id per async op type */
typedef struct
{
  char *name;
  vnet_crypto_async_op_id_t op_by_type[VNET_CRYPTO_ASYNC_OP_N_TYPES];
} vnet_crypto_async_alg_data_t;
332
/* One element of an async frame: everything an engine needs to process
   a single buffer.  Size is asserted below — keep it at 40 bytes. */
typedef struct
{
  u8 *iv;
  /* digest for linked algs, tag for AEAD algs */
  union
  {
    u8 *digest;
    u8 *tag;
  };
  u8 *aad;
  u32 key_index;
  u32 crypto_total_length;
  i16 crypto_start_offset; /* first buffer offset */
  i16 integ_start_offset;
  /* adj total_length for integ, e.g. 4 bytes for IPSec ESN */
  i16 integ_length_adj;
  vnet_crypto_op_status_t status : 8;
  u8 flags; /**< share same VNET_CRYPTO_OP_FLAG_* values */
} vnet_crypto_async_frame_elt_t;

/* Assert the size so the compiler will warn us when it changes */
STATIC_ASSERT_SIZEOF (vnet_crypto_async_frame_elt_t, 5 * sizeof (u64));

/* Lifecycle of an async frame, from allocation through engine
   processing to completion (or per-element error) */
typedef enum vnet_crypto_async_frame_state_t_
{
  VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED,
  /* frame waiting to be processed */
  VNET_CRYPTO_FRAME_STATE_PENDING,
  VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS,
  VNET_CRYPTO_FRAME_STATE_SUCCESS,
  VNET_CRYPTO_FRAME_STATE_ELT_ERROR
} __clib_packed vnet_crypto_async_frame_state_t;
364
/* A batch of up to VNET_CRYPTO_FRAME_SIZE async operations submitted to
   an engine as a unit.  Parallel arrays: elts[i] describes the work for
   buffer_indices[i], which is handed to next_node_index[i] when done. */
typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  vnet_crypto_async_frame_state_t state;
  vnet_crypto_async_op_id_t op:8;
  u16 n_elts;
  vnet_crypto_async_frame_elt_t elts[VNET_CRYPTO_FRAME_SIZE];
  u32 buffer_indices[VNET_CRYPTO_FRAME_SIZE];
  u16 next_node_index[VNET_CRYPTO_FRAME_SIZE];
  /* thread that enqueued the frame; set in submit, read at dequeue */
  u32 enqueue_thread_index;
} vnet_crypto_async_frame_t;

/* Per-thread state: frame pool plus scratch arrays for dispatch */
typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  vnet_crypto_async_frame_t *frame_pool;
  u32 *buffer_indices;
  u16 *nexts;
} vnet_crypto_thread_t;
384
typedef u32 vnet_crypto_key_index_t;

/* Engine entry points.  Ops handlers process arrays of ops in place and
   return the number handled; the chained variant also receives the
   shared chunk array for multi-buffer ops. */
typedef u32 (vnet_crypto_chained_ops_handler_t) (vlib_main_t * vm,
                                                 vnet_crypto_op_t * ops[],
                                                 vnet_crypto_op_chunk_t *
                                                 chunks, u32 n_ops);

typedef u32 (vnet_crypto_ops_handler_t) (vlib_main_t * vm,
                                         vnet_crypto_op_t * ops[], u32 n_ops);

/* Notifies an engine of key add/del/modify so it can (re)derive state */
typedef void (vnet_crypto_key_handler_t) (vlib_main_t * vm,
                                          vnet_crypto_key_op_t kop,
                                          vnet_crypto_key_index_t idx);

/** async crypto function handlers **/
typedef int
  (vnet_crypto_frame_enqueue_t) (vlib_main_t * vm,
                                 vnet_crypto_async_frame_t * frame);
typedef vnet_crypto_async_frame_t *
  (vnet_crypto_frame_dequeue_t) (vlib_main_t * vm, u32 * nb_elts_processed,
                                 u32 * enqueue_thread_idx);

/* Register an engine; returns its index.  Higher prio wins when several
   engines implement the same op. */
u32
vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
                             char *desc);

void vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
                                       vnet_crypto_op_id_t opt,
                                       vnet_crypto_ops_handler_t * oph);

void vnet_crypto_register_chained_ops_handler (vlib_main_t * vm,
                                               u32 engine_index,
                                               vnet_crypto_op_id_t opt,
                                               vnet_crypto_chained_ops_handler_t
                                               * oph);

/* Register both simple and chained handlers for one op in one call */
void vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
                                        vnet_crypto_op_id_t opt,
                                        vnet_crypto_ops_handler_t * fn,
                                        vnet_crypto_chained_ops_handler_t *
                                        cfn);

void vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
                                       vnet_crypto_key_handler_t * keyh);

/** async crypto register functions */
u32 vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name);

void
vnet_crypto_register_enqueue_handler (vlib_main_t *vm, u32 engine_index,
                                      vnet_crypto_async_op_id_t opt,
                                      vnet_crypto_frame_enqueue_t *enq_fn);

void
vnet_crypto_register_dequeue_handler (vlib_main_t *vm, u32 engine_index,
                                      vnet_crypto_frame_dequeue_t *deq_fn);

/* Everything registered by one engine */
typedef struct
{
  char *name;
  char *desc;
  int priority;
  vnet_crypto_key_handler_t *key_op_handler;
  vnet_crypto_ops_handler_t *ops_handlers[VNET_CRYPTO_N_OP_IDS];
    vnet_crypto_chained_ops_handler_t
    * chained_ops_handlers[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_frame_enqueue_t *enqueue_handlers[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_frame_dequeue_t *dequeue_handler;
} vnet_crypto_engine_t;
454
/* Graph node + next index a completed async frame is handed off to */
typedef struct
{
  u32 node_idx;
  u32 next_idx;
} vnet_crypto_async_next_node_t;

/* Global crypto subsystem state (single instance: crypto_main) */
typedef struct
{
  vnet_crypto_alg_data_t *algs;          /* vec, indexed by vnet_crypto_alg_t */
  vnet_crypto_thread_t *threads;         /* vec, indexed by thread index */
  vnet_crypto_ops_handler_t **ops_handlers;      /* active handler per op id */
  vnet_crypto_chained_ops_handler_t **chained_ops_handlers;
  vnet_crypto_frame_enqueue_t **enqueue_handlers; /* per async op id */
  vnet_crypto_frame_dequeue_t **dequeue_handlers;
  vnet_crypto_op_data_t opt_data[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_async_op_data_t async_opt_data[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_engine_t *engines;         /* vec of registered engines */
  vnet_crypto_key_t *keys;               /* vec, indexed by key index */
  uword *engine_index_by_name;           /* hash: name -> engine index */
  uword *alg_index_by_name;
  uword *async_alg_index_by_name;
  vnet_crypto_async_alg_data_t *async_algs;
  vnet_crypto_async_next_node_t *next_nodes;
  u32 crypto_node_index;                 /* async dispatch node */
} vnet_crypto_main_t;
480
extern vnet_crypto_main_t crypto_main;

/* Dispatch arrays of ops to the active engine; return ops processed */
u32 vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
                                     vnet_crypto_op_chunk_t * chunks,
                                     u32 n_ops);
u32 vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
                             u32 n_ops);

void vnet_crypto_set_async_dispatch (u8 mode, u8 adaptive);
/* Select ENGINE for an op; oct picks simple, chained or both flavors */
int vnet_crypto_set_handler2 (char *ops_handler_name, char *engine,
                              crypto_op_class_type_t oct);
int vnet_crypto_is_set_handler (vnet_crypto_alg_t alg);

/* Key management; vnet_crypto_key_add returns the new key index */
u32 vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg,
                         u8 * data, u16 length);
void vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index);
void vnet_crypto_key_update (vlib_main_t *vm, vnet_crypto_key_index_t index);

/**
 * Use 2 created keys to generate new key for linked algs (cipher + integ)
 * The returned key index is to be used for linked alg only.
 **/
u32 vnet_crypto_key_add_linked (vlib_main_t * vm,
                                vnet_crypto_key_index_t index_crypto,
                                vnet_crypto_key_index_t index_integ);

int vnet_crypto_set_async_handler2 (char *alg_name, char *engine);

int vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t opt);

/* Map a (cipher, integ) pair to the matching linked async algorithm */
vnet_crypto_async_alg_t vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
                                               vnet_crypto_alg_t integ_alg);

/* format/unformat helpers for CLI and debugging output */
format_function_t format_vnet_crypto_alg;
format_function_t format_vnet_crypto_engine;
format_function_t format_vnet_crypto_op;
format_function_t format_vnet_crypto_op_type;
format_function_t format_vnet_crypto_op_status;
unformat_function_t unformat_vnet_crypto_alg;

format_function_t format_vnet_crypto_async_op;
format_function_t format_vnet_crypto_async_alg;
format_function_t format_vnet_crypto_async_op_type;
524
525 static_always_inline void
526 vnet_crypto_op_init (vnet_crypto_op_t * op, vnet_crypto_op_id_t type)
527 {
528   if (CLIB_DEBUG > 0)
529     clib_memset (op, 0xfe, sizeof (*op));
530   op->op = type;
531   op->flags = 0;
532   op->key_index = ~0;
533   op->n_chunks = 0;
534 }
535
536 static_always_inline vnet_crypto_op_type_t
537 vnet_crypto_get_op_type (vnet_crypto_op_id_t id)
538 {
539   vnet_crypto_main_t *cm = &crypto_main;
540   ASSERT (id < VNET_CRYPTO_N_OP_IDS);
541   vnet_crypto_op_data_t *od = cm->opt_data + id;
542   return od->type;
543 }
544
545 static_always_inline vnet_crypto_key_t *
546 vnet_crypto_get_key (vnet_crypto_key_index_t index)
547 {
548   vnet_crypto_main_t *cm = &crypto_main;
549   return vec_elt_at_index (cm->keys, index);
550 }
551
552 static_always_inline int
553 vnet_crypto_set_handler (char *alg_name, char *engine)
554 {
555   return vnet_crypto_set_handler2 (alg_name, engine, CRYPTO_OP_BOTH);
556 }
557
558 /** async crypto inline functions **/
559
560 static_always_inline vnet_crypto_async_frame_t *
561 vnet_crypto_async_get_frame (vlib_main_t * vm, vnet_crypto_async_op_id_t opt)
562 {
563   vnet_crypto_main_t *cm = &crypto_main;
564   vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
565   vnet_crypto_async_frame_t *f = NULL;
566
567   if (PREDICT_TRUE (pool_free_elts (ct->frame_pool)))
568     {
569       pool_get_aligned (ct->frame_pool, f, CLIB_CACHE_LINE_BYTES);
570 #if CLIB_DEBUG > 0
571       clib_memset (f, 0xfe, sizeof (*f));
572 #endif
573       f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
574       f->op = opt;
575       f->n_elts = 0;
576     }
577
578   return f;
579 }
580
581 static_always_inline void
582 vnet_crypto_async_free_frame (vlib_main_t * vm,
583                               vnet_crypto_async_frame_t * frame)
584 {
585   vnet_crypto_main_t *cm = &crypto_main;
586   vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
587   pool_put (ct->frame_pool, frame);
588 }
589
/* Hand FRAME to the active async engine.  Returns the engine's return
 * value (0 on success); on any failure the frame is marked ELT_ERROR
 * and the caller still owns it. */
static_always_inline int
vnet_crypto_async_submit_open_frame (vlib_main_t * vm,
                                     vnet_crypto_async_frame_t * frame)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  u32 i;
  vlib_node_t *n;

  /* mark in-flight and record the submitting thread so the dequeue
     side can return buffers to it */
  frame->state = VNET_CRYPTO_FRAME_STATE_PENDING;
  frame->enqueue_thread_index = vm->thread_index;

  /* no async engine ever registered an enqueue handler */
  if (PREDICT_FALSE (cm->enqueue_handlers == NULL))
    {
      frame->state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
      return -1;
    }

  int ret = (cm->enqueue_handlers[frame->op]) (vm, frame);

  if (PREDICT_TRUE (ret == 0))
    {
      /* if the crypto dispatch node runs in interrupt mode, kick it on
         every thread so completions are picked up promptly */
      n = vlib_get_node (vm, cm->crypto_node_index);
      if (n->state == VLIB_NODE_STATE_INTERRUPT)
        {
          for (i = 0; i < tm->n_vlib_mains; i++)
            vlib_node_set_interrupt_pending (vlib_get_main_by_index (i),
                                             cm->crypto_node_index);
        }
    }
  else
    {
      frame->state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
    }

  return ret;
}
627
628 static_always_inline void
629 vnet_crypto_async_add_to_frame (vlib_main_t *vm, vnet_crypto_async_frame_t *f,
630                                 u32 key_index, u32 crypto_len,
631                                 i16 integ_len_adj, i16 crypto_start_offset,
632                                 i16 integ_start_offset, u32 buffer_index,
633                                 u16 next_node, u8 *iv, u8 *tag, u8 *aad,
634                                 u8 flags)
635 {
636   vnet_crypto_async_frame_elt_t *fe;
637   u16 index;
638
639   ASSERT (f->n_elts < VNET_CRYPTO_FRAME_SIZE);
640
641   index = f->n_elts;
642   fe = &f->elts[index];
643   f->n_elts++;
644   fe->key_index = key_index;
645   fe->crypto_total_length = crypto_len;
646   fe->crypto_start_offset = crypto_start_offset;
647   fe->integ_start_offset = integ_start_offset;
648   fe->integ_length_adj = integ_len_adj;
649   fe->iv = iv;
650   fe->tag = tag;
651   fe->aad = aad;
652   fe->flags = flags;
653   f->buffer_indices[index] = buffer_index;
654   f->next_node_index[index] = next_node;
655 }
656
657 static_always_inline void
658 vnet_crypto_async_reset_frame (vnet_crypto_async_frame_t * f)
659 {
660   vnet_crypto_async_op_id_t opt;
661   ASSERT (f != 0);
662   ASSERT ((f->state == VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED
663            || f->state == VNET_CRYPTO_FRAME_STATE_ELT_ERROR));
664   opt = f->op;
665   if (CLIB_DEBUG > 0)
666     clib_memset (f, 0xfe, sizeof (*f));
667   f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
668   f->op = opt;
669   f->n_elts = 0;
670 }
671
672 static_always_inline u8
673 vnet_crypto_async_frame_is_full (const vnet_crypto_async_frame_t *f)
674 {
675   return (f->n_elts == VNET_CRYPTO_FRAME_SIZE);
676 }
677
678 #endif /* included_vnet_crypto_crypto_h */
679
680 /*
681  * fd.io coding-style-patch-verification: ON
682  *
683  * Local Variables:
684  * eval: (c-set-style "gnu")
685  * End:
686  */