crypto: allow changing dispatch mode
[vpp.git] / src / vnet / crypto / crypto.h
1 /*
2  * Copyright (c) 2019 Cisco and/or its affiliates.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at:
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15
16 #ifndef included_vnet_crypto_crypto_h
17 #define included_vnet_crypto_crypto_h
18
19 #include <vlib/vlib.h>
20
21 #define VNET_CRYPTO_FRAME_SIZE 64
22 #define VNET_CRYPTO_FRAME_POOL_SIZE 1024
23
/* Synchronous cipher algorithms.
   CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_cipher_alg \
  _(DES_CBC,     "des-cbc", 7) \
  _(3DES_CBC,    "3des-cbc", 24) \
  _(AES_128_CBC, "aes-128-cbc", 16) \
  _(AES_192_CBC, "aes-192-cbc", 24) \
  _(AES_256_CBC, "aes-256-cbc", 32) \
  _(AES_128_CTR, "aes-128-ctr", 16) \
  _(AES_192_CTR, "aes-192-ctr", 24) \
  _(AES_256_CTR, "aes-256-ctr", 32)

/* Synchronous AEAD algorithms.
   CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_aead_alg                                               \
  _ (AES_128_GCM, "aes-128-gcm", 16)                                          \
  _ (AES_192_GCM, "aes-192-gcm", 24)                                          \
  _ (AES_256_GCM, "aes-256-gcm", 32)                                          \
  _ (AES_128_NULL_GMAC, "aes-128-null-gmac", 16)                              \
  _ (AES_192_NULL_GMAC, "aes-192-null-gmac", 24)                              \
  _ (AES_256_NULL_GMAC, "aes-256-null-gmac", 32)                              \
  _ (CHACHA20_POLY1305, "chacha20-poly1305", 32)

/* Plain (un-keyed) hash algorithms. CRYPTO_ID, PRETTY_NAME */
#define foreach_crypto_hash_alg                                               \
  _ (SHA1, "sha-1")                                                           \
  _ (SHA224, "sha-224")                                                       \
  _ (SHA256, "sha-256")                                                       \
  _ (SHA384, "sha-384")                                                       \
  _ (SHA512, "sha-512")

/* HMAC integrity algorithms. CRYPTO_ID, PRETTY_NAME */
#define foreach_crypto_hmac_alg \
  _(MD5, "md5") \
  _(SHA1, "sha-1") \
  _(SHA224, "sha-224")  \
  _(SHA256, "sha-256")  \
  _(SHA384, "sha-384")  \
  _(SHA512, "sha-512")

/* Operation categories for the synchronous API. ENUM_SUFFIX, PRETTY_NAME */
#define foreach_crypto_op_type                                                \
  _ (ENCRYPT, "encrypt")                                                      \
  _ (DECRYPT, "decrypt")                                                      \
  _ (AEAD_ENCRYPT, "aead-encrypt")                                            \
  _ (AEAD_DECRYPT, "aead-decrypt")                                            \
  _ (HMAC, "hmac")                                                            \
  _ (HASH, "hash")

/* VNET_CRYPTO_OP_TYPE_* — one value per foreach_crypto_op_type entry,
   plus a trailing count (VNET_CRYPTO_OP_N_TYPES). */
typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_TYPE_##n,
  foreach_crypto_op_type
#undef _
    VNET_CRYPTO_OP_N_TYPES,
} vnet_crypto_op_type_t;

/* Per-op completion / error states. ENUM_SUFFIX, PRETTY_NAME */
#define foreach_crypto_op_status \
  _(IDLE, "idle") \
  _(PENDING, "pending") \
  _(WORK_IN_PROGRESS, "work-in-progress") \
  _(COMPLETED, "completed") \
  _(FAIL_NO_HANDLER, "no-handler") \
  _(FAIL_BAD_HMAC, "bad-hmac") \
  _(FAIL_ENGINE_ERR, "engine-error")
84
/** async crypto **/

/* Async AEAD variants: each base AEAD algorithm appears once per supported
   AAD length, since async engines need the AAD size fixed per session.
   CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, TAG_LEN, AAD_LEN */
#define foreach_crypto_aead_async_alg                                         \
  _ (AES_128_GCM, "aes-128-gcm-aad8", 16, 16, 8)                              \
  _ (AES_128_GCM, "aes-128-gcm-aad12", 16, 16, 12)                            \
  _ (AES_192_GCM, "aes-192-gcm-aad8", 24, 16, 8)                              \
  _ (AES_192_GCM, "aes-192-gcm-aad12", 24, 16, 12)                            \
  _ (AES_256_GCM, "aes-256-gcm-aad8", 32, 16, 8)                              \
  _ (AES_256_GCM, "aes-256-gcm-aad12", 32, 16, 12)                            \
  _ (AES_128_NULL_GMAC, "aes-128-null-gmac-aad8", 16, 16, 8)                  \
  _ (AES_128_NULL_GMAC, "aes-128-null-gmac-aad12", 16, 16, 12)                \
  _ (AES_192_NULL_GMAC, "aes-192-null-gmac-aad8", 24, 16, 8)                  \
  _ (AES_192_NULL_GMAC, "aes-192-null-gmac-aad12", 24, 16, 12)                \
  _ (AES_256_NULL_GMAC, "aes-256-null-gmac-aad8", 32, 16, 8)                  \
  _ (AES_256_NULL_GMAC, "aes-256-null-gmac-aad12", 32, 16, 12)                \
  _ (CHACHA20_POLY1305, "chacha20-poly1305-aad8", 32, 16, 8)                  \
  _ (CHACHA20_POLY1305, "chacha20-poly1305-aad12", 32, 16, 12)                \
  _ (CHACHA20_POLY1305, "chacha20-poly1305", 32, 16, 0)

/* Async "linked" algorithms: a cipher combined with an HMAC integrity
   algorithm (e.g. for IPsec), keyed via vnet_crypto_key_add_linked().
   CRYPTO_ID, INTEG_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, DIGEST_LEN */
#define foreach_crypto_link_async_alg                                         \
  _ (3DES_CBC, MD5, "3des-cbc-hmac-md5", 24, 12)                              \
  _ (AES_128_CBC, MD5, "aes-128-cbc-hmac-md5", 16, 12)                        \
  _ (AES_192_CBC, MD5, "aes-192-cbc-hmac-md5", 24, 12)                        \
  _ (AES_256_CBC, MD5, "aes-256-cbc-hmac-md5", 32, 12)                        \
  _ (3DES_CBC, SHA1, "3des-cbc-hmac-sha-1", 24, 12)                           \
  _ (AES_128_CBC, SHA1, "aes-128-cbc-hmac-sha-1", 16, 12)                     \
  _ (AES_192_CBC, SHA1, "aes-192-cbc-hmac-sha-1", 24, 12)                     \
  _ (AES_256_CBC, SHA1, "aes-256-cbc-hmac-sha-1", 32, 12)                     \
  _ (3DES_CBC, SHA224, "3des-cbc-hmac-sha-224", 24, 14)                       \
  _ (AES_128_CBC, SHA224, "aes-128-cbc-hmac-sha-224", 16, 14)                 \
  _ (AES_192_CBC, SHA224, "aes-192-cbc-hmac-sha-224", 24, 14)                 \
  _ (AES_256_CBC, SHA224, "aes-256-cbc-hmac-sha-224", 32, 14)                 \
  _ (3DES_CBC, SHA256, "3des-cbc-hmac-sha-256", 24, 16)                       \
  _ (AES_128_CBC, SHA256, "aes-128-cbc-hmac-sha-256", 16, 16)                 \
  _ (AES_192_CBC, SHA256, "aes-192-cbc-hmac-sha-256", 24, 16)                 \
  _ (AES_256_CBC, SHA256, "aes-256-cbc-hmac-sha-256", 32, 16)                 \
  _ (3DES_CBC, SHA384, "3des-cbc-hmac-sha-384", 24, 24)                       \
  _ (AES_128_CBC, SHA384, "aes-128-cbc-hmac-sha-384", 16, 24)                 \
  _ (AES_192_CBC, SHA384, "aes-192-cbc-hmac-sha-384", 24, 24)                 \
  _ (AES_256_CBC, SHA384, "aes-256-cbc-hmac-sha-384", 32, 24)                 \
  _ (3DES_CBC, SHA512, "3des-cbc-hmac-sha-512", 24, 32)                       \
  _ (AES_128_CBC, SHA512, "aes-128-cbc-hmac-sha-512", 16, 32)                 \
  _ (AES_192_CBC, SHA512, "aes-192-cbc-hmac-sha-512", 24, 32)                 \
  _ (AES_256_CBC, SHA512, "aes-256-cbc-hmac-sha-512", 32, 32)                 \
  _ (AES_128_CTR, SHA1, "aes-128-ctr-hmac-sha-1", 16, 12)                     \
  _ (AES_192_CTR, SHA1, "aes-192-ctr-hmac-sha-1", 24, 12)                     \
  _ (AES_256_CTR, SHA1, "aes-256-ctr-hmac-sha-1", 32, 12)

/* Async operations are direction-only; AEAD vs linked is implied by the
   algorithm. ENUM_SUFFIX, PRETTY_NAME */
#define foreach_crypto_async_op_type \
  _(ENCRYPT, "async-encrypt") \
  _(DECRYPT, "async-decrypt")
138
/* Key lifecycle events delivered to engine key handlers
   (see vnet_crypto_key_handler_t). */
typedef enum
{
  VNET_CRYPTO_KEY_OP_ADD,
  VNET_CRYPTO_KEY_OP_DEL,
  VNET_CRYPTO_KEY_OP_MODIFY,
} vnet_crypto_key_op_t;

/* VNET_CRYPTO_OP_STATUS_* — one value per foreach_crypto_op_status entry. */
typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_STATUS_##n,
  foreach_crypto_op_status
#undef _
    VNET_CRYPTO_OP_N_STATUS,
} vnet_crypto_op_status_t;

/* *INDENT-OFF* */
/* Flat algorithm id space: NONE, ciphers, AEADs, then HMAC_* and HASH_*
   variants. Expansion order must match the foreach_* macro order above. */
typedef enum
{
  VNET_CRYPTO_ALG_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_ALG_##n,
  foreach_crypto_cipher_alg foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_ALG_HMAC_##n,
    foreach_crypto_hmac_alg
#undef _
#define _(n, s) VNET_CRYPTO_ALG_HASH_##n,
      foreach_crypto_hash_alg
#undef _
        VNET_CRYPTO_N_ALGS,
} vnet_crypto_alg_t;

/* VNET_CRYPTO_ASYNC_OP_TYPE_* — one value per async op type. */
typedef enum
{
#define _(n, s) VNET_CRYPTO_ASYNC_OP_TYPE_##n,
  foreach_crypto_async_op_type
#undef _
    VNET_CRYPTO_ASYNC_OP_N_TYPES,
} vnet_crypto_async_op_type_t;

/* Async algorithm ids: AEAD variants (per TAG/AAD length) followed by
   linked cipher+integ variants (per digest length). */
typedef enum
{
  VNET_CRYPTO_ASYNC_ALG_NONE = 0,
#define _(n, s, k, t, a) \
  VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a,
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k ,d) \
  VNET_CRYPTO_ALG_##c##_##h##_TAG##d,
  foreach_crypto_link_async_alg
#undef _
  VNET_CRYPTO_N_ASYNC_ALGS,
} vnet_crypto_async_alg_t;

/* Async op ids: an _ENC/_DEC pair per async algorithm, in the same
   expansion order as vnet_crypto_async_alg_t. */
typedef enum
{
  VNET_CRYPTO_ASYNC_OP_NONE = 0,
#define _(n, s, k, t, a) \
  VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
  VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k ,d) \
  VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
  VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,
  foreach_crypto_link_async_alg
#undef _
  VNET_CRYPTO_ASYNC_OP_N_IDS,
} vnet_crypto_async_op_id_t;
207
/* A crypto key. Which union arm is valid is selected by 'type':
   - VNET_CRYPTO_KEY_TYPE_DATA: raw key bytes in 'data' for algorithm 'alg'
   - VNET_CRYPTO_KEY_TYPE_LINK: pair of indices into the key pool referring
     to a crypto key and an integrity key (see vnet_crypto_key_add_linked) */
typedef struct
{
  union
  {
    struct
    {
      u8 *data;
      vnet_crypto_alg_t alg:8;
    };
    struct
    {
      u32 index_crypto;
      u32 index_integ;
      vnet_crypto_async_alg_t async_alg:8;
    };
  };
#define VNET_CRYPTO_KEY_TYPE_DATA 0
#define VNET_CRYPTO_KEY_TYPE_LINK 1
  u8 type;
} vnet_crypto_key_t;

/* Synchronous op ids: an _ENC/_DEC pair per cipher/AEAD algorithm, then
   _HMAC and _HASH ids. Expansion order must match vnet_crypto_alg_t. */
typedef enum
{
  VNET_CRYPTO_OP_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_OP_##n##_ENC, VNET_CRYPTO_OP_##n##_DEC,
  foreach_crypto_cipher_alg foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_OP_##n##_HMAC,
    foreach_crypto_hmac_alg
#undef _
#define _(n, s) VNET_CRYPTO_OP_##n##_HASH,
      foreach_crypto_hash_alg
#undef _
        VNET_CRYPTO_N_OP_IDS,
} vnet_crypto_op_id_t;
/* *INDENT-ON* */

/* Selects which handler flavor(s) vnet_crypto_set_handler2() installs. */
typedef enum
{
  CRYPTO_OP_SIMPLE,
  CRYPTO_OP_CHAINED,
  CRYPTO_OP_BOTH,
} crypto_op_class_type_t;
251
/* Per-algorithm data: pretty name and the op id for each op type. */
typedef struct
{
  char *name;
  vnet_crypto_op_id_t op_by_type[VNET_CRYPTO_OP_N_TYPES];
} vnet_crypto_alg_data_t;

/* One segment of a chained (multi-buffer) operation. */
typedef struct
{
  u8 *src;
  u8 *dst;
  u32 len;
} vnet_crypto_op_chunk_t;

/* A single synchronous crypto operation, sized to one cache line
   (see the STATIC_ASSERT_SIZEOF below). */
typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  uword user_data;
  vnet_crypto_op_id_t op:16;
  vnet_crypto_op_status_t status:8;
  u8 flags;
#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK      (1 << 0)
#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS (1 << 1)

  union
  {
    u8 digest_len;
    u8 tag_len;
  };
  u16 aad_len;

  union
  {
    struct
    {
      u8 *src;
      u8 *dst;
    };

    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
    u16 n_chunks;
  };

  union
  {
    u32 len;
    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
    u32 chunk_index;
  };

  u32 key_index;
  u8 *iv;
  u8 *aad;

  union
  {
    u8 *tag;  /* AEAD */
    u8 *digest;  /* HMAC/HASH */
  };
} vnet_crypto_op_t;

STATIC_ASSERT_SIZEOF (vnet_crypto_op_t, CLIB_CACHE_LINE_BYTES);
313
/* Per-op-id dispatch data: which engine currently serves the simple and
   chained flavors of this op. */
typedef struct
{
  vnet_crypto_op_type_t type;
  vnet_crypto_alg_t alg;
  u32 active_engine_index_simple;
  u32 active_engine_index_chained;
} vnet_crypto_op_data_t;

/* Per-async-op-id dispatch data. */
typedef struct
{
  vnet_crypto_async_op_type_t type;
  vnet_crypto_async_alg_t alg;
  u32 active_engine_index_async;
} vnet_crypto_async_op_data_t;

/* Per-async-algorithm data: pretty name and op id per async op type. */
typedef struct
{
  char *name;
  vnet_crypto_async_op_id_t op_by_type[VNET_CRYPTO_ASYNC_OP_N_TYPES];
} vnet_crypto_async_alg_data_t;

/* One element of an async frame: everything an engine needs to process
   a single buffer. Offsets are relative to the buffer's data. */
typedef struct
{
  u8 *iv;
  union
  {
    u8 *digest;
    u8 *tag;
  };
  u8 *aad;
  u32 key_index;
  u32 crypto_total_length;
  i16 crypto_start_offset; /* first buffer offset */
  i16 integ_start_offset;
  /* adj total_length for integ, e.g.4 bytes for IPSec ESN */
  i16 integ_length_adj;
  vnet_crypto_op_status_t status : 8;
  u8 flags; /**< share same VNET_CRYPTO_OP_FLAG_* values */
} vnet_crypto_async_frame_elt_t;

/* Assert the size so the compiler will warn us when it changes */
STATIC_ASSERT_SIZEOF (vnet_crypto_async_frame_elt_t, 5 * sizeof (u64));
356
/* Lifecycle of an async frame, from allocation through engine completion. */
typedef enum vnet_crypto_async_frame_state_t_
{
  VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED,
  /* frame waiting to be processed */
  VNET_CRYPTO_FRAME_STATE_PENDING,
  VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS,
  VNET_CRYPTO_FRAME_STATE_SUCCESS,
  VNET_CRYPTO_FRAME_STATE_ELT_ERROR
} __clib_packed vnet_crypto_async_frame_state_t;

/* A batch of up to VNET_CRYPTO_FRAME_SIZE async operations submitted to an
   engine as a unit; parallel arrays are indexed by element slot. */
typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  vnet_crypto_async_frame_state_t state;
  vnet_crypto_async_op_id_t op:8;
  u16 n_elts;
  vnet_crypto_async_frame_elt_t elts[VNET_CRYPTO_FRAME_SIZE];
  u32 buffer_indices[VNET_CRYPTO_FRAME_SIZE];
  u16 next_node_index[VNET_CRYPTO_FRAME_SIZE];
  u32 enqueue_thread_index;
} vnet_crypto_async_frame_t;

/* Per-thread state: pool of async frames plus scratch arrays. */
typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  vnet_crypto_async_frame_t *frame_pool;
  u32 *buffer_indices;
  u16 *nexts;
} vnet_crypto_thread_t;

/* Index into vnet_crypto_main_t.keys. */
typedef u32 vnet_crypto_key_index_t;
388
/* Engine entry point for chained (multi-chunk) ops; returns the number of
   ops successfully processed. */
typedef u32 (vnet_crypto_chained_ops_handler_t) (vlib_main_t * vm,
                                                 vnet_crypto_op_t * ops[],
                                                 vnet_crypto_op_chunk_t *
                                                 chunks, u32 n_ops);

/* Engine entry point for simple (single-buffer) ops; returns the number of
   ops successfully processed. */
typedef u32 (vnet_crypto_ops_handler_t) (vlib_main_t * vm,
                                         vnet_crypto_op_t * ops[], u32 n_ops);

/* Notifies an engine that key 'idx' was added/modified/deleted. */
typedef void (vnet_crypto_key_handler_t) (vlib_main_t * vm,
                                          vnet_crypto_key_op_t kop,
                                          vnet_crypto_key_index_t idx);

/** async crypto function handlers **/
/* Submit a frame to an engine; non-zero return means the frame was not
   accepted. */
typedef int
  (vnet_crypto_frame_enqueue_t) (vlib_main_t * vm,
                                 vnet_crypto_async_frame_t * frame);
/* Poll an engine for a completed frame; may return NULL. */
typedef vnet_crypto_async_frame_t *
  (vnet_crypto_frame_dequeue_t) (vlib_main_t * vm, u32 * nb_elts_processed,
                                 u32 * enqueue_thread_idx);

/* Register a crypto engine; returns its engine index. Higher 'prio' wins
   when choosing the active engine for an op. */
u32
vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
                             char *desc);

void vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
                                       vnet_crypto_op_id_t opt,
                                       vnet_crypto_ops_handler_t * oph);

void vnet_crypto_register_chained_ops_handler (vlib_main_t * vm,
                                               u32 engine_index,
                                               vnet_crypto_op_id_t opt,
                                               vnet_crypto_chained_ops_handler_t
                                               * oph);

/* Register both simple and chained handlers for 'opt' in one call. */
void vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
                                        vnet_crypto_op_id_t opt,
                                        vnet_crypto_ops_handler_t * fn,
                                        vnet_crypto_chained_ops_handler_t *
                                        cfn);

void vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
                                       vnet_crypto_key_handler_t * keyh);

/** async crypto register functions */
u32 vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name);

void
vnet_crypto_register_enqueue_handler (vlib_main_t *vm, u32 engine_index,
                                      vnet_crypto_async_op_id_t opt,
                                      vnet_crypto_frame_enqueue_t *enq_fn);

void
vnet_crypto_register_dequeue_handler (vlib_main_t *vm, u32 engine_index,
                                      vnet_crypto_frame_dequeue_t *deq_fn);
443
/* A registered crypto engine and its per-op handler tables. */
typedef struct
{
  char *name;
  char *desc;
  int priority;  /* higher wins when selecting the active engine */
  vnet_crypto_key_handler_t *key_op_handler;
  vnet_crypto_ops_handler_t *ops_handlers[VNET_CRYPTO_N_OP_IDS];
    vnet_crypto_chained_ops_handler_t
    * chained_ops_handlers[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_frame_enqueue_t *enqueue_handlers[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_frame_dequeue_t *dequeue_handler;
} vnet_crypto_engine_t;

/* Post-processing node registered via vnet_crypto_register_post_node. */
typedef struct
{
  u32 node_idx;
  u32 next_idx;
} vnet_crypto_async_next_node_t;

/* Global crypto subsystem state (singleton: crypto_main).
   The *_handlers vectors hold the currently-active handler per op id,
   resolved from the registered engines. */
typedef struct
{
  vnet_crypto_alg_data_t *algs;
  vnet_crypto_thread_t *threads;
  vnet_crypto_ops_handler_t **ops_handlers;
  vnet_crypto_chained_ops_handler_t **chained_ops_handlers;
  vnet_crypto_frame_enqueue_t **enqueue_handlers;
  vnet_crypto_frame_dequeue_t **dequeue_handlers;
  vnet_crypto_op_data_t opt_data[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_async_op_data_t async_opt_data[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_engine_t *engines;
  vnet_crypto_key_t *keys;  /* pool of keys, indexed by key index */
  uword *engine_index_by_name;
  uword *alg_index_by_name;
  uword *async_alg_index_by_name;
  vnet_crypto_async_alg_data_t *async_algs;
  vnet_crypto_async_next_node_t *next_nodes;
  u32 crypto_node_index;
} vnet_crypto_main_t;

extern vnet_crypto_main_t crypto_main;
484
/* Process a batch of chained ops with the active handlers; returns the
   number of ops completed successfully. */
u32 vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
                                     vnet_crypto_op_chunk_t * chunks,
                                     u32 n_ops);
/* Process a batch of simple ops with the active handlers; returns the
   number of ops completed successfully. */
u32 vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
                             u32 n_ops);

/* Switch the crypto dispatch node between polling/interrupt modes. */
void vnet_crypto_set_async_dispatch (u8 mode, u8 adaptive);
/* Install 'engine' as the handler for the named op class; 'oct' selects
   simple, chained or both flavors. */
int vnet_crypto_set_handler2 (char *ops_handler_name, char *engine,
                              crypto_op_class_type_t oct);
int vnet_crypto_is_set_handler (vnet_crypto_alg_t alg);

/* Add a key from raw bytes; returns its key index. */
u32 vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg,
                         u8 * data, u16 length);
void vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index);

/**
 * Use 2 created keys to generate new key for linked algs (cipher + integ)
 * The returned key index is to be used for linked alg only.
 **/
u32 vnet_crypto_key_add_linked (vlib_main_t * vm,
                                vnet_crypto_key_index_t index_crypto,
                                vnet_crypto_key_index_t index_integ);

int vnet_crypto_set_async_handler2 (char *alg_name, char *engine);

int vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t opt);

/* Map a (cipher, integ) pair to its linked async algorithm id. */
vnet_crypto_async_alg_t vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
                                               vnet_crypto_alg_t integ_alg);

format_function_t format_vnet_crypto_alg;
format_function_t format_vnet_crypto_engine;
format_function_t format_vnet_crypto_op;
format_function_t format_vnet_crypto_op_type;
format_function_t format_vnet_crypto_op_status;
unformat_function_t unformat_vnet_crypto_alg;

format_function_t format_vnet_crypto_async_op;
format_function_t format_vnet_crypto_async_alg;
format_function_t format_vnet_crypto_async_op_type;
525
526 static_always_inline void
527 vnet_crypto_op_init (vnet_crypto_op_t * op, vnet_crypto_op_id_t type)
528 {
529   if (CLIB_DEBUG > 0)
530     clib_memset (op, 0xfe, sizeof (*op));
531   op->op = type;
532   op->flags = 0;
533   op->key_index = ~0;
534   op->n_chunks = 0;
535 }
536
537 static_always_inline vnet_crypto_op_type_t
538 vnet_crypto_get_op_type (vnet_crypto_op_id_t id)
539 {
540   vnet_crypto_main_t *cm = &crypto_main;
541   ASSERT (id < VNET_CRYPTO_N_OP_IDS);
542   vnet_crypto_op_data_t *od = cm->opt_data + id;
543   return od->type;
544 }
545
546 static_always_inline vnet_crypto_key_t *
547 vnet_crypto_get_key (vnet_crypto_key_index_t index)
548 {
549   vnet_crypto_main_t *cm = &crypto_main;
550   return vec_elt_at_index (cm->keys, index);
551 }
552
/* Convenience wrapper around vnet_crypto_set_handler2() that installs
   'engine' for both simple and chained handlers. */
static_always_inline int
vnet_crypto_set_handler (char *alg_name, char *engine)
{
  return vnet_crypto_set_handler2 (alg_name, engine, CRYPTO_OP_BOTH);
}
558
559 /** async crypto inline functions **/
560
561 static_always_inline vnet_crypto_async_frame_t *
562 vnet_crypto_async_get_frame (vlib_main_t * vm, vnet_crypto_async_op_id_t opt)
563 {
564   vnet_crypto_main_t *cm = &crypto_main;
565   vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
566   vnet_crypto_async_frame_t *f = NULL;
567
568   if (PREDICT_TRUE (pool_free_elts (ct->frame_pool)))
569     {
570       pool_get_aligned (ct->frame_pool, f, CLIB_CACHE_LINE_BYTES);
571 #if CLIB_DEBUG > 0
572       clib_memset (f, 0xfe, sizeof (*f));
573 #endif
574       f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
575       f->op = opt;
576       f->n_elts = 0;
577     }
578
579   return f;
580 }
581
582 static_always_inline void
583 vnet_crypto_async_free_frame (vlib_main_t * vm,
584                               vnet_crypto_async_frame_t * frame)
585 {
586   vnet_crypto_main_t *cm = &crypto_main;
587   vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
588   pool_put (ct->frame_pool, frame);
589 }
590
591 static_always_inline int
592 vnet_crypto_async_submit_open_frame (vlib_main_t * vm,
593                                      vnet_crypto_async_frame_t * frame)
594 {
595   vnet_crypto_main_t *cm = &crypto_main;
596   vlib_thread_main_t *tm = vlib_get_thread_main ();
597   u32 i;
598   vlib_node_t *n;
599
600   frame->state = VNET_CRYPTO_FRAME_STATE_PENDING;
601   frame->enqueue_thread_index = vm->thread_index;
602
603   if (PREDICT_FALSE (cm->enqueue_handlers == NULL))
604     {
605       frame->state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
606       return -1;
607     }
608
609   int ret = (cm->enqueue_handlers[frame->op]) (vm, frame);
610
611   if (PREDICT_TRUE (ret == 0))
612     {
613       n = vlib_get_node (vm, cm->crypto_node_index);
614       if (n->state == VLIB_NODE_STATE_INTERRUPT)
615         {
616           for (i = 0; i < tm->n_vlib_mains; i++)
617             vlib_node_set_interrupt_pending (vlib_get_main_by_index (i),
618                                              cm->crypto_node_index);
619         }
620     }
621   else
622     {
623       frame->state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
624     }
625
626   return ret;
627 }
628
629 static_always_inline void
630 vnet_crypto_async_add_to_frame (vlib_main_t *vm, vnet_crypto_async_frame_t *f,
631                                 u32 key_index, u32 crypto_len,
632                                 i16 integ_len_adj, i16 crypto_start_offset,
633                                 i16 integ_start_offset, u32 buffer_index,
634                                 u16 next_node, u8 *iv, u8 *tag, u8 *aad,
635                                 u8 flags)
636 {
637   vnet_crypto_async_frame_elt_t *fe;
638   u16 index;
639
640   ASSERT (f->n_elts < VNET_CRYPTO_FRAME_SIZE);
641
642   index = f->n_elts;
643   fe = &f->elts[index];
644   f->n_elts++;
645   fe->key_index = key_index;
646   fe->crypto_total_length = crypto_len;
647   fe->crypto_start_offset = crypto_start_offset;
648   fe->integ_start_offset = integ_start_offset;
649   fe->integ_length_adj = integ_len_adj;
650   fe->iv = iv;
651   fe->tag = tag;
652   fe->aad = aad;
653   fe->flags = flags;
654   f->buffer_indices[index] = buffer_index;
655   f->next_node_index[index] = next_node;
656 }
657
658 static_always_inline void
659 vnet_crypto_async_reset_frame (vnet_crypto_async_frame_t * f)
660 {
661   vnet_crypto_async_op_id_t opt;
662   ASSERT (f != 0);
663   ASSERT ((f->state == VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED
664            || f->state == VNET_CRYPTO_FRAME_STATE_ELT_ERROR));
665   opt = f->op;
666   if (CLIB_DEBUG > 0)
667     clib_memset (f, 0xfe, sizeof (*f));
668   f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
669   f->op = opt;
670   f->n_elts = 0;
671 }
672
673 static_always_inline u8
674 vnet_crypto_async_frame_is_full (const vnet_crypto_async_frame_t *f)
675 {
676   return (f->n_elts == VNET_CRYPTO_FRAME_SIZE);
677 }
678
679 #endif /* included_vnet_crypto_crypto_h */
680
681 /*
682  * fd.io coding-style-patch-verification: ON
683  *
684  * Local Variables:
685  * eval: (c-set-style "gnu")
686  * End:
687  */