/*
 * Copyright (c) 2019 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef included_vnet_crypto_crypto_h
#define included_vnet_crypto_crypto_h

#include <vlib/vlib.h>

#define VNET_CRYPTO_FRAME_SIZE 64
#define VNET_CRYPTO_FRAME_POOL_SIZE 1024

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_cipher_alg \
  _(DES_CBC, "des-cbc", 7) \
  _(3DES_CBC, "3des-cbc", 24) \
  _(AES_128_CBC, "aes-128-cbc", 16) \
  _(AES_192_CBC, "aes-192-cbc", 24) \
  _(AES_256_CBC, "aes-256-cbc", 32) \
  _(AES_128_CTR, "aes-128-ctr", 16) \
  _(AES_192_CTR, "aes-192-ctr", 24) \
  _(AES_256_CTR, "aes-256-ctr", 32)
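
/*
 * The foreach_* tables in this header follow the X-macro pattern: a user
 * defines _() to shape one table row, expands the table, then undefines
 * _().  For illustration (this is exactly how the enums further below are
 * generated), expanding
 *
 *   #define _(n, s, l) VNET_CRYPTO_ALG_##n,
 *   foreach_crypto_cipher_alg
 *   #undef _
 *
 * produces VNET_CRYPTO_ALG_DES_CBC, VNET_CRYPTO_ALG_3DES_CBC,
 * VNET_CRYPTO_ALG_AES_128_CBC, and so on, one enumerator per table row.
 */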

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_aead_alg \
  _ (AES_128_GCM, "aes-128-gcm", 16) \
  _ (AES_192_GCM, "aes-192-gcm", 24) \
  _ (AES_256_GCM, "aes-256-gcm", 32) \
  _ (AES_128_NULL_GMAC, "aes-128-null-gmac", 16) \
  _ (AES_192_NULL_GMAC, "aes-192-null-gmac", 24) \
  _ (AES_256_NULL_GMAC, "aes-256-null-gmac", 32) \
  _ (CHACHA20_POLY1305, "chacha20-poly1305", 32)

#define foreach_crypto_hash_alg \
  _ (SHA1, "sha-1") \
  _ (SHA224, "sha-224") \
  _ (SHA256, "sha-256") \
  _ (SHA384, "sha-384") \
  _ (SHA512, "sha-512")

#define foreach_crypto_hmac_alg \
  _(MD5, "md5") \
  _(SHA1, "sha-1") \
  _(SHA224, "sha-224") \
  _(SHA256, "sha-256") \
  _(SHA384, "sha-384") \
  _(SHA512, "sha-512")

#define foreach_crypto_op_type \
  _ (ENCRYPT, "encrypt") \
  _ (DECRYPT, "decrypt") \
  _ (AEAD_ENCRYPT, "aead-encrypt") \
  _ (AEAD_DECRYPT, "aead-decrypt") \
  _ (HMAC, "hmac") \
  _ (HASH, "hash")

typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_TYPE_##n,
  foreach_crypto_op_type
#undef _
    VNET_CRYPTO_OP_N_TYPES,
} vnet_crypto_op_type_t;

#define foreach_crypto_op_status \
  _(IDLE, "idle") \
  _(PENDING, "pending") \
  _(WORK_IN_PROGRESS, "work-in-progress") \
  _(COMPLETED, "completed") \
  _(FAIL_NO_HANDLER, "no-handler") \
  _(FAIL_BAD_HMAC, "bad-hmac") \
  _(FAIL_ENGINE_ERR, "engine-error")

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, TAG_LEN, AAD_LEN */
#define foreach_crypto_aead_async_alg \
  _ (AES_128_GCM, "aes-128-gcm-aad8", 16, 16, 8) \
  _ (AES_128_GCM, "aes-128-gcm-aad12", 16, 16, 12) \
  _ (AES_192_GCM, "aes-192-gcm-aad8", 24, 16, 8) \
  _ (AES_192_GCM, "aes-192-gcm-aad12", 24, 16, 12) \
  _ (AES_256_GCM, "aes-256-gcm-aad8", 32, 16, 8) \
  _ (AES_256_GCM, "aes-256-gcm-aad12", 32, 16, 12) \
  _ (AES_128_NULL_GMAC, "aes-128-null-gmac-aad8", 16, 16, 8) \
  _ (AES_128_NULL_GMAC, "aes-128-null-gmac-aad12", 16, 16, 12) \
  _ (AES_192_NULL_GMAC, "aes-192-null-gmac-aad8", 24, 16, 8) \
  _ (AES_192_NULL_GMAC, "aes-192-null-gmac-aad12", 24, 16, 12) \
  _ (AES_256_NULL_GMAC, "aes-256-null-gmac-aad8", 32, 16, 8) \
  _ (AES_256_NULL_GMAC, "aes-256-null-gmac-aad12", 32, 16, 12) \
  _ (CHACHA20_POLY1305, "chacha20-poly1305-aad8", 32, 16, 8) \
  _ (CHACHA20_POLY1305, "chacha20-poly1305-aad12", 32, 16, 12) \
  _ (CHACHA20_POLY1305, "chacha20-poly1305", 32, 16, 0)

/* CRYPTO_ID, INTEG_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, DIGEST_LEN */
#define foreach_crypto_link_async_alg \
  _ (3DES_CBC, MD5, "3des-cbc-hmac-md5", 24, 12) \
  _ (AES_128_CBC, MD5, "aes-128-cbc-hmac-md5", 16, 12) \
  _ (AES_192_CBC, MD5, "aes-192-cbc-hmac-md5", 24, 12) \
  _ (AES_256_CBC, MD5, "aes-256-cbc-hmac-md5", 32, 12) \
  _ (3DES_CBC, SHA1, "3des-cbc-hmac-sha-1", 24, 12) \
  _ (AES_128_CBC, SHA1, "aes-128-cbc-hmac-sha-1", 16, 12) \
  _ (AES_192_CBC, SHA1, "aes-192-cbc-hmac-sha-1", 24, 12) \
  _ (AES_256_CBC, SHA1, "aes-256-cbc-hmac-sha-1", 32, 12) \
  _ (3DES_CBC, SHA224, "3des-cbc-hmac-sha-224", 24, 14) \
  _ (AES_128_CBC, SHA224, "aes-128-cbc-hmac-sha-224", 16, 14) \
  _ (AES_192_CBC, SHA224, "aes-192-cbc-hmac-sha-224", 24, 14) \
  _ (AES_256_CBC, SHA224, "aes-256-cbc-hmac-sha-224", 32, 14) \
  _ (3DES_CBC, SHA256, "3des-cbc-hmac-sha-256", 24, 16) \
  _ (AES_128_CBC, SHA256, "aes-128-cbc-hmac-sha-256", 16, 16) \
  _ (AES_192_CBC, SHA256, "aes-192-cbc-hmac-sha-256", 24, 16) \
  _ (AES_256_CBC, SHA256, "aes-256-cbc-hmac-sha-256", 32, 16) \
  _ (3DES_CBC, SHA384, "3des-cbc-hmac-sha-384", 24, 24) \
  _ (AES_128_CBC, SHA384, "aes-128-cbc-hmac-sha-384", 16, 24) \
  _ (AES_192_CBC, SHA384, "aes-192-cbc-hmac-sha-384", 24, 24) \
  _ (AES_256_CBC, SHA384, "aes-256-cbc-hmac-sha-384", 32, 24) \
  _ (3DES_CBC, SHA512, "3des-cbc-hmac-sha-512", 24, 32) \
  _ (AES_128_CBC, SHA512, "aes-128-cbc-hmac-sha-512", 16, 32) \
  _ (AES_192_CBC, SHA512, "aes-192-cbc-hmac-sha-512", 24, 32) \
  _ (AES_256_CBC, SHA512, "aes-256-cbc-hmac-sha-512", 32, 32) \
  _ (AES_128_CTR, SHA1, "aes-128-ctr-hmac-sha-1", 16, 12) \
  _ (AES_192_CTR, SHA1, "aes-192-ctr-hmac-sha-1", 24, 12) \
  _ (AES_256_CTR, SHA1, "aes-256-ctr-hmac-sha-1", 32, 12)

#define foreach_crypto_async_op_type \
  _(ENCRYPT, "async-encrypt") \
  _(DECRYPT, "async-decrypt")

typedef enum
{
  VNET_CRYPTO_KEY_OP_ADD,
  VNET_CRYPTO_KEY_OP_DEL,
  VNET_CRYPTO_KEY_OP_MODIFY,
} vnet_crypto_key_op_t;

typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_STATUS_##n,
  foreach_crypto_op_status
#undef _
    VNET_CRYPTO_OP_N_STATUS,
} vnet_crypto_op_status_t;

typedef enum
{
  VNET_CRYPTO_ALG_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_ALG_##n,
  foreach_crypto_cipher_alg foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_ALG_HMAC_##n,
  foreach_crypto_hmac_alg
#undef _
#define _(n, s) VNET_CRYPTO_ALG_HASH_##n,
  foreach_crypto_hash_alg
#undef _
  VNET_CRYPTO_N_ALGS,
} vnet_crypto_alg_t;

typedef enum
{
#define _(n, s) VNET_CRYPTO_ASYNC_OP_TYPE_##n,
  foreach_crypto_async_op_type
#undef _
    VNET_CRYPTO_ASYNC_OP_N_TYPES,
} vnet_crypto_async_op_type_t;

typedef enum
{
  VNET_CRYPTO_ASYNC_ALG_NONE = 0,
#define _(n, s, k, t, a) \
  VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a,
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d) \
  VNET_CRYPTO_ALG_##c##_##h##_TAG##d,
  foreach_crypto_link_async_alg
#undef _
  VNET_CRYPTO_N_ASYNC_ALGS,
} vnet_crypto_async_alg_t;

typedef enum
{
  VNET_CRYPTO_ASYNC_OP_NONE = 0,
#define _(n, s, k, t, a) \
  VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
  VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d) \
  VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
  VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,
  foreach_crypto_link_async_alg
#undef _
  VNET_CRYPTO_ASYNC_OP_N_IDS,
} vnet_crypto_async_op_id_t;

typedef struct
{
  union
  {
    struct
    {
      u8 *data;
      vnet_crypto_alg_t alg:8;
    };
    struct
    {
      u32 index_crypto;
      u32 index_integ;
      vnet_crypto_async_alg_t async_alg:8;
    };
  };
#define VNET_CRYPTO_KEY_TYPE_DATA 0
#define VNET_CRYPTO_KEY_TYPE_LINK 1
  u8 type;
} vnet_crypto_key_t;

typedef enum
{
  VNET_CRYPTO_OP_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_OP_##n##_ENC, VNET_CRYPTO_OP_##n##_DEC,
  foreach_crypto_cipher_alg foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_OP_##n##_HMAC,
  foreach_crypto_hmac_alg
#undef _
#define _(n, s) VNET_CRYPTO_OP_##n##_HASH,
  foreach_crypto_hash_alg
#undef _
    VNET_CRYPTO_N_OP_IDS,
} vnet_crypto_op_id_t;

typedef enum
{
  CRYPTO_OP_SIMPLE,
  CRYPTO_OP_CHAINED,
  CRYPTO_OP_BOTH,
} crypto_op_class_type_t;

typedef struct
{
  char *name;
  vnet_crypto_op_id_t op_by_type[VNET_CRYPTO_OP_N_TYPES];
} vnet_crypto_alg_data_t;

typedef struct
{
  u8 *src;
  u8 *dst;
  u32 len;
} vnet_crypto_op_chunk_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  uword user_data;
  vnet_crypto_op_id_t op:16;
  vnet_crypto_op_status_t status:8;
  u8 flags;
#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK (1 << 0)
#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS (1 << 1)

  union
  {
    u8 digest_len;
    u8 tag_len;
  };
  u16 aad_len;

  u32 key_index;

  union
  {
    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is not set */
    u32 len;
    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
    u16 n_chunks;
  };

  u8 *iv;
  u8 *aad;

  union
  {
    u8 *digest;
    u8 *tag;
  };

  union
  {
    struct
    {
      u8 *src;
      u8 *dst;
    };
    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
    u32 chunk_index;
  };
} vnet_crypto_op_t;

STATIC_ASSERT_SIZEOF (vnet_crypto_op_t, CLIB_CACHE_LINE_BYTES);
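
/*
 * Usage sketch (illustrative only): a minimal synchronous AES-128-CBC
 * encrypt of a single contiguous buffer.  key_index, iv, data and
 * data_len are assumed to be set up by the caller; the key comes from
 * vnet_crypto_key_add ().
 *
 *   vnet_crypto_op_t op;
 *   vnet_crypto_op_init (&op, VNET_CRYPTO_OP_AES_128_CBC_ENC);
 *   op.key_index = key_index;
 *   op.iv = iv;
 *   op.src = op.dst = data;      // in-place operation
 *   op.len = data_len;           // multiple of the AES block size
 *   vnet_crypto_process_ops (vm, &op, 1);
 *   if (op.status != VNET_CRYPTO_OP_STATUS_COMPLETED)
 *     ;                          // handle the error
 */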

typedef struct
{
  vnet_crypto_op_type_t type;
  vnet_crypto_alg_t alg;
  u32 active_engine_index_simple;
  u32 active_engine_index_chained;
} vnet_crypto_op_data_t;

typedef struct
{
  vnet_crypto_async_op_type_t type;
  vnet_crypto_async_alg_t alg;
  u32 active_engine_index_async;
} vnet_crypto_async_op_data_t;

typedef struct
{
  char *name;
  vnet_crypto_async_op_id_t op_by_type[VNET_CRYPTO_ASYNC_OP_N_TYPES];
} vnet_crypto_async_alg_data_t;

typedef struct
{
  u8 *iv;
  union
  {
    u8 *digest;
    u8 *tag;
  };
  u8 *aad;
  u32 key_index;
  u32 crypto_total_length;
  i16 crypto_start_offset; /* first buffer offset */
  i16 integ_start_offset;
  /* adjustment to total_length for integ, e.g. 4 bytes for IPsec ESN */
  i16 integ_length_adj;
  vnet_crypto_op_status_t status : 8;
  u8 flags; /**< shares the same VNET_CRYPTO_OP_FLAG_* values */
} vnet_crypto_async_frame_elt_t;

/* Assert the size so the compiler will warn us when it changes */
STATIC_ASSERT_SIZEOF (vnet_crypto_async_frame_elt_t, 5 * sizeof (u64));

typedef enum vnet_crypto_async_frame_state_t_
{
  VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED,
  /* frame waiting to be processed */
  VNET_CRYPTO_FRAME_STATE_PENDING,
  VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS,
  VNET_CRYPTO_FRAME_STATE_SUCCESS,
  VNET_CRYPTO_FRAME_STATE_ELT_ERROR
} __clib_packed vnet_crypto_async_frame_state_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  vnet_crypto_async_frame_state_t state;
  vnet_crypto_async_op_id_t op:8;
  u16 n_elts;
  vnet_crypto_async_frame_elt_t elts[VNET_CRYPTO_FRAME_SIZE];
  u32 buffer_indices[VNET_CRYPTO_FRAME_SIZE];
  u16 next_node_index[VNET_CRYPTO_FRAME_SIZE];
  u32 enqueue_thread_index;
} vnet_crypto_async_frame_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  vnet_crypto_async_frame_t *frame_pool;
  u32 *buffer_indices;
  u16 *nexts;
} vnet_crypto_thread_t;

typedef u32 vnet_crypto_key_index_t;

typedef u32 (vnet_crypto_chained_ops_handler_t) (vlib_main_t * vm,
						 vnet_crypto_op_t * ops[],
						 vnet_crypto_op_chunk_t *
						 chunks, u32 n_ops);

typedef u32 (vnet_crypto_ops_handler_t) (vlib_main_t * vm,
					 vnet_crypto_op_t * ops[], u32 n_ops);

typedef void (vnet_crypto_key_handler_t) (vlib_main_t * vm,
					  vnet_crypto_key_op_t kop,
					  vnet_crypto_key_index_t idx);

/** async crypto function handlers **/
typedef int
  (vnet_crypto_frame_enqueue_t) (vlib_main_t * vm,
				 vnet_crypto_async_frame_t * frame);
typedef vnet_crypto_async_frame_t *
  (vnet_crypto_frame_dequeue_t) (vlib_main_t * vm, u32 * nb_elts_processed,
				 u32 * enqueue_thread_idx);

u32
vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
			     char *desc);

void vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
				       vnet_crypto_op_id_t opt,
				       vnet_crypto_ops_handler_t * oph);

void vnet_crypto_register_chained_ops_handler (vlib_main_t * vm,
					       u32 engine_index,
					       vnet_crypto_op_id_t opt,
					       vnet_crypto_chained_ops_handler_t
					       * oph);

void vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
					vnet_crypto_op_id_t opt,
					vnet_crypto_ops_handler_t * fn,
					vnet_crypto_chained_ops_handler_t *
					cfn);

void vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
				       vnet_crypto_key_handler_t * keyh);

/** async crypto register functions */
u32 vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name);

void
vnet_crypto_register_enqueue_handler (vlib_main_t *vm, u32 engine_index,
				      vnet_crypto_async_op_id_t opt,
				      vnet_crypto_frame_enqueue_t *enq_fn);

void
vnet_crypto_register_dequeue_handler (vlib_main_t *vm, u32 engine_index,
				      vnet_crypto_frame_dequeue_t *deq_fn);
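
/*
 * Registration sketch (illustrative): a hypothetical engine registers
 * itself and one synchronous handler.  "myengine", the priority value and
 * my_aes_128_cbc_enc are made-up names for the example.
 *
 *   u32 eidx = vnet_crypto_register_engine (vm, "myengine", 100,
 *                                           "example engine");
 *   vnet_crypto_register_ops_handler (vm, eidx,
 *                                     VNET_CRYPTO_OP_AES_128_CBC_ENC,
 *                                     my_aes_128_cbc_enc);
 *
 * my_aes_128_cbc_enc must match the vnet_crypto_ops_handler_t signature
 * and return the number of successfully processed ops.
 */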

typedef struct
{
  char *name;
  char *desc;
  int priority;
  vnet_crypto_key_handler_t *key_op_handler;
  vnet_crypto_ops_handler_t *ops_handlers[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_chained_ops_handler_t
    * chained_ops_handlers[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_frame_enqueue_t *enqueue_handlers[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_frame_dequeue_t *dequeue_handler;
} vnet_crypto_engine_t;

typedef struct
{
  u32 node_idx;
  u32 next_idx;
} vnet_crypto_async_next_node_t;

typedef struct
{
  vnet_crypto_alg_data_t *algs;
  vnet_crypto_thread_t *threads;
  vnet_crypto_ops_handler_t **ops_handlers;
  vnet_crypto_chained_ops_handler_t **chained_ops_handlers;
  vnet_crypto_frame_enqueue_t **enqueue_handlers;
  vnet_crypto_frame_dequeue_t **dequeue_handlers;
  vnet_crypto_op_data_t opt_data[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_async_op_data_t async_opt_data[VNET_CRYPTO_ASYNC_OP_N_IDS];
  vnet_crypto_engine_t *engines;
  vnet_crypto_key_t *keys;
  uword *engine_index_by_name;
  uword *alg_index_by_name;
  uword *async_alg_index_by_name;
  vnet_crypto_async_alg_data_t *async_algs;
  vnet_crypto_async_next_node_t *next_nodes;
  u32 crypto_node_index;
} vnet_crypto_main_t;

extern vnet_crypto_main_t crypto_main;

u32 vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
				     vnet_crypto_op_chunk_t * chunks,
				     u32 n_ops);
u32 vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
			     u32 n_ops);

int vnet_crypto_set_handler2 (char *ops_handler_name, char *engine,
			      crypto_op_class_type_t oct);
int vnet_crypto_is_set_handler (vnet_crypto_alg_t alg);

u32 vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg,
			 u8 * data, u16 length);
void vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index);

/**
 * Use two previously added keys to generate a new key for linked algs
 * (cipher + integ).  The returned key index is to be used for the linked
 * alg only.
 */
u32 vnet_crypto_key_add_linked (vlib_main_t * vm,
				vnet_crypto_key_index_t index_crypto,
				vnet_crypto_key_index_t index_integ);
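
/*
 * Key setup sketch (illustrative): build a linked key for
 * AES-128-CBC + HMAC-SHA-256.  crypto_key_data and integ_key_data are
 * assumed to be caller-supplied key material of the stated lengths.
 *
 *   u32 ci = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_AES_128_CBC,
 *                                 crypto_key_data, 16);
 *   u32 ii = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_HMAC_SHA256,
 *                                 integ_key_data, 32);
 *   u32 li = vnet_crypto_key_add_linked (vm, ci, ii);
 *   // li is valid only for the linked (cipher + integ) algorithm
 */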

int vnet_crypto_set_async_handler2 (char *alg_name, char *engine);

int vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t opt);

vnet_crypto_async_alg_t vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
					       vnet_crypto_alg_t integ_alg);

format_function_t format_vnet_crypto_alg;
format_function_t format_vnet_crypto_engine;
format_function_t format_vnet_crypto_op;
format_function_t format_vnet_crypto_op_type;
format_function_t format_vnet_crypto_op_status;
unformat_function_t unformat_vnet_crypto_alg;

format_function_t format_vnet_crypto_async_op;
format_function_t format_vnet_crypto_async_alg;
format_function_t format_vnet_crypto_async_op_type;

static_always_inline void
vnet_crypto_op_init (vnet_crypto_op_t * op, vnet_crypto_op_id_t type)
{
  if (CLIB_DEBUG > 0)
    clib_memset (op, 0xfe, sizeof (*op));
  op->op = type;
  op->flags = 0;
  op->key_index = ~0;
  op->n_chunks = 0;
}

static_always_inline vnet_crypto_op_type_t
vnet_crypto_get_op_type (vnet_crypto_op_id_t id)
{
  vnet_crypto_main_t *cm = &crypto_main;
  ASSERT (id < VNET_CRYPTO_N_OP_IDS);
  vnet_crypto_op_data_t *od = cm->opt_data + id;
  return od->type;
}

static_always_inline vnet_crypto_key_t *
vnet_crypto_get_key (vnet_crypto_key_index_t index)
{
  vnet_crypto_main_t *cm = &crypto_main;
  return vec_elt_at_index (cm->keys, index);
}

static_always_inline int
vnet_crypto_set_handler (char *alg_name, char *engine)
{
  return vnet_crypto_set_handler2 (alg_name, engine, CRYPTO_OP_BOTH);
}

/** async crypto inline functions **/

static_always_inline vnet_crypto_async_frame_t *
vnet_crypto_async_get_frame (vlib_main_t * vm, vnet_crypto_async_op_id_t opt)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
  vnet_crypto_async_frame_t *f = NULL;

  if (PREDICT_TRUE (pool_free_elts (ct->frame_pool)))
    {
      pool_get_aligned (ct->frame_pool, f, CLIB_CACHE_LINE_BYTES);
#if CLIB_DEBUG > 0
      clib_memset (f, 0xfe, sizeof (*f));
#endif
      f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
      f->op = opt;
      f->n_elts = 0;
    }

  return f;
}

static_always_inline void
vnet_crypto_async_free_frame (vlib_main_t * vm,
			      vnet_crypto_async_frame_t * frame)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
  pool_put (ct->frame_pool, frame);
}

static_always_inline int
vnet_crypto_async_submit_open_frame (vlib_main_t * vm,
				     vnet_crypto_async_frame_t * frame)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  u32 i;
  vlib_node_t *n;

  frame->state = VNET_CRYPTO_FRAME_STATE_PENDING;
  frame->enqueue_thread_index = vm->thread_index;

  if (PREDICT_FALSE (cm->enqueue_handlers == NULL))
    {
      frame->state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
      return -1;
    }

  int ret = (cm->enqueue_handlers[frame->op]) (vm, frame);

  if (PREDICT_TRUE (ret == 0))
    {
      n = vlib_get_node (vm, cm->crypto_node_index);
      if (n->state == VLIB_NODE_STATE_INTERRUPT)
	{
	  for (i = 0; i < tm->n_vlib_mains; i++)
	    vlib_node_set_interrupt_pending (vlib_get_main_by_index (i),
					     cm->crypto_node_index);
	}
    }
  else
    frame->state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;

  return ret;
}

static_always_inline void
vnet_crypto_async_add_to_frame (vlib_main_t *vm, vnet_crypto_async_frame_t *f,
				u32 key_index, u32 crypto_len,
				i16 integ_len_adj, i16 crypto_start_offset,
				i16 integ_start_offset, u32 buffer_index,
				u16 next_node, u8 *iv, u8 *tag, u8 *aad,
				u8 flags)
{
  vnet_crypto_async_frame_elt_t *fe;
  u16 index;

  ASSERT (f->n_elts < VNET_CRYPTO_FRAME_SIZE);

  index = f->n_elts;
  fe = &f->elts[index];
  f->n_elts++;
  fe->key_index = key_index;
  fe->crypto_total_length = crypto_len;
  fe->crypto_start_offset = crypto_start_offset;
  fe->integ_start_offset = integ_start_offset;
  fe->integ_length_adj = integ_len_adj;
  fe->iv = iv;
  fe->tag = tag;
  fe->aad = aad;
  fe->flags = flags;
  f->buffer_indices[index] = buffer_index;
  f->next_node_index[index] = next_node;
}

static_always_inline void
vnet_crypto_async_reset_frame (vnet_crypto_async_frame_t * f)
{
  vnet_crypto_async_op_id_t opt;
  ASSERT (f != 0);
  ASSERT ((f->state == VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED
	   || f->state == VNET_CRYPTO_FRAME_STATE_ELT_ERROR));
  opt = f->op;
  if (CLIB_DEBUG > 0)
    clib_memset (f, 0xfe, sizeof (*f));
  f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
  f->op = opt;
  f->n_elts = 0;
}

static_always_inline u8
vnet_crypto_async_frame_is_full (const vnet_crypto_async_frame_t *f)
{
  return (f->n_elts == VNET_CRYPTO_FRAME_SIZE);
}
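
/*
 * Async workflow sketch (illustrative): enqueue one buffer's worth of
 * work and submit the frame.  All offsets, lengths and pointers are
 * assumed to come from the caller's buffer layout.
 *
 *   vnet_crypto_async_frame_t *f = vnet_crypto_async_get_frame
 *     (vm, VNET_CRYPTO_OP_AES_128_GCM_TAG16_AAD12_ENC);
 *   if (!f)
 *     return;                    // pool exhausted, drop or retry
 *   vnet_crypto_async_add_to_frame (vm, f, key_index, crypto_len,
 *                                   integ_len_adj, crypto_start_offset,
 *                                   integ_start_offset, buffer_index,
 *                                   next_node, iv, tag, aad, flags);
 *   if (vnet_crypto_async_submit_open_frame (vm, f) < 0)
 *     vnet_crypto_async_reset_frame (f);  // state is ELT_ERROR; recycle
 */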

#endif /* included_vnet_crypto_crypto_h */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */