2 * Copyright (c) 2019 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 #ifndef included_vnet_crypto_crypto_h
17 #define included_vnet_crypto_crypto_h
19 #include <vlib/vlib.h>
/* Maximum number of operation elements carried by one async crypto frame;
   sizes the elts[] / buffer_indices[] / next_node_index[] arrays of
   vnet_crypto_async_frame_t below. */
#define VNET_CRYPTO_FRAME_SIZE 64
/* Sizing for the per-thread async frame pool — exact usage is not visible
   in this chunk; presumably an initial element count. TODO confirm. */
#define VNET_CRYPTO_FRAME_POOL_SIZE 1024
/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES
 * Synchronous cipher algorithms. Expanded via a user-supplied _() macro
 * (X-macro pattern) to generate enums and tables elsewhere in this file.
 * Note: spacing normalized to the `_ (` form used by the other foreach
 * macros in this header for consistency (no semantic change). */
#define foreach_crypto_cipher_alg                                             \
  _ (DES_CBC, "des-cbc", 7)                                                   \
  _ (3DES_CBC, "3des-cbc", 24)                                                \
  _ (AES_128_CBC, "aes-128-cbc", 16)                                          \
  _ (AES_192_CBC, "aes-192-cbc", 24)                                          \
  _ (AES_256_CBC, "aes-256-cbc", 32)                                          \
  _ (AES_128_CTR, "aes-128-ctr", 16)                                          \
  _ (AES_192_CTR, "aes-192-ctr", 24)                                          \
  _ (AES_256_CTR, "aes-256-ctr", 32)
/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES
 * AEAD (combined encrypt + authenticate) algorithms. The *_NULL_GMAC
 * entries presumably perform GMAC authentication with a null cipher
 * (no confidentiality) — TODO confirm against engine implementations. */
#define foreach_crypto_aead_alg \
_ (AES_128_GCM, "aes-128-gcm", 16) \
_ (AES_192_GCM, "aes-192-gcm", 24) \
_ (AES_256_GCM, "aes-256-gcm", 32) \
_ (AES_128_NULL_GMAC, "aes-128-null-gmac", 16) \
_ (AES_192_NULL_GMAC, "aes-192-null-gmac", 24) \
_ (AES_256_NULL_GMAC, "aes-256-null-gmac", 32) \
_ (CHACHA20_POLY1305, "chacha20-poly1305", 32)
45 #define foreach_crypto_hash_alg \
47 _ (SHA224, "sha-224") \
48 _ (SHA256, "sha-256") \
49 _ (SHA384, "sha-384") \
52 #define foreach_crypto_hmac_alg \
55 _(SHA224, "sha-224") \
56 _(SHA256, "sha-256") \
57 _(SHA384, "sha-384") \
60 #define foreach_crypto_op_type \
61 _ (ENCRYPT, "encrypt") \
62 _ (DECRYPT, "decrypt") \
63 _ (AEAD_ENCRYPT, "aead-encrypt") \
64 _ (AEAD_DECRYPT, "aead-decrypt") \
70 #define _(n, s) VNET_CRYPTO_OP_TYPE_##n,
71 foreach_crypto_op_type
73 VNET_CRYPTO_OP_N_TYPES,
74 } vnet_crypto_op_type_t;
76 #define foreach_crypto_op_status \
78 _(PENDING, "pending") \
79 _(WORK_IN_PROGRESS, "work-in-progress") \
80 _(COMPLETED, "completed") \
81 _(FAIL_NO_HANDLER, "no-handler") \
82 _(FAIL_BAD_HMAC, "bad-hmac") \
83 _(FAIL_ENGINE_ERR, "engine-error")
/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, TAG_LEN, AAD_LEN
 * Async AEAD variants: one entry per (algorithm, AAD length) combination
 * so async engines can specialize per fixed AAD size. TAG_LEN is 16 bytes
 * for every entry in this list. */
#define foreach_crypto_aead_async_alg \
_ (AES_128_GCM, "aes-128-gcm-aad8", 16, 16, 8) \
_ (AES_128_GCM, "aes-128-gcm-aad12", 16, 16, 12) \
_ (AES_192_GCM, "aes-192-gcm-aad8", 24, 16, 8) \
_ (AES_192_GCM, "aes-192-gcm-aad12", 24, 16, 12) \
_ (AES_256_GCM, "aes-256-gcm-aad8", 32, 16, 8) \
_ (AES_256_GCM, "aes-256-gcm-aad12", 32, 16, 12) \
_ (AES_128_NULL_GMAC, "aes-128-null-gmac-aad8", 16, 16, 8) \
_ (AES_128_NULL_GMAC, "aes-128-null-gmac-aad12", 16, 16, 12) \
_ (AES_192_NULL_GMAC, "aes-192-null-gmac-aad8", 24, 16, 8) \
_ (AES_192_NULL_GMAC, "aes-192-null-gmac-aad12", 24, 16, 12) \
_ (AES_256_NULL_GMAC, "aes-256-null-gmac-aad8", 32, 16, 8) \
_ (AES_256_NULL_GMAC, "aes-256-null-gmac-aad12", 32, 16, 12) \
_ (CHACHA20_POLY1305, "chacha20-poly1305-aad8", 32, 16, 8) \
_ (CHACHA20_POLY1305, "chacha20-poly1305-aad12", 32, 16, 12) \
_ (CHACHA20_POLY1305, "chacha20-poly1305", 32, 16, 0)
/* CRYPTO_ID, INTEG_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, DIGEST_LEN
 * "Linked" async algorithms: a cipher paired with an HMAC integrity
 * algorithm. DIGEST_LEN is the truncated HMAC output in bytes (e.g. 12
 * for hmac-sha-1's 96-bit truncation), not the full digest size. */
#define foreach_crypto_link_async_alg \
_ (3DES_CBC, MD5, "3des-cbc-hmac-md5", 24, 12) \
_ (AES_128_CBC, MD5, "aes-128-cbc-hmac-md5", 16, 12) \
_ (AES_192_CBC, MD5, "aes-192-cbc-hmac-md5", 24, 12) \
_ (AES_256_CBC, MD5, "aes-256-cbc-hmac-md5", 32, 12) \
_ (3DES_CBC, SHA1, "3des-cbc-hmac-sha-1", 24, 12) \
_ (AES_128_CBC, SHA1, "aes-128-cbc-hmac-sha-1", 16, 12) \
_ (AES_192_CBC, SHA1, "aes-192-cbc-hmac-sha-1", 24, 12) \
_ (AES_256_CBC, SHA1, "aes-256-cbc-hmac-sha-1", 32, 12) \
_ (3DES_CBC, SHA224, "3des-cbc-hmac-sha-224", 24, 14) \
_ (AES_128_CBC, SHA224, "aes-128-cbc-hmac-sha-224", 16, 14) \
_ (AES_192_CBC, SHA224, "aes-192-cbc-hmac-sha-224", 24, 14) \
_ (AES_256_CBC, SHA224, "aes-256-cbc-hmac-sha-224", 32, 14) \
_ (3DES_CBC, SHA256, "3des-cbc-hmac-sha-256", 24, 16) \
_ (AES_128_CBC, SHA256, "aes-128-cbc-hmac-sha-256", 16, 16) \
_ (AES_192_CBC, SHA256, "aes-192-cbc-hmac-sha-256", 24, 16) \
_ (AES_256_CBC, SHA256, "aes-256-cbc-hmac-sha-256", 32, 16) \
_ (3DES_CBC, SHA384, "3des-cbc-hmac-sha-384", 24, 24) \
_ (AES_128_CBC, SHA384, "aes-128-cbc-hmac-sha-384", 16, 24) \
_ (AES_192_CBC, SHA384, "aes-192-cbc-hmac-sha-384", 24, 24) \
_ (AES_256_CBC, SHA384, "aes-256-cbc-hmac-sha-384", 32, 24) \
_ (3DES_CBC, SHA512, "3des-cbc-hmac-sha-512", 24, 32) \
_ (AES_128_CBC, SHA512, "aes-128-cbc-hmac-sha-512", 16, 32) \
_ (AES_192_CBC, SHA512, "aes-192-cbc-hmac-sha-512", 24, 32) \
_ (AES_256_CBC, SHA512, "aes-256-cbc-hmac-sha-512", 32, 32) \
_ (AES_128_CTR, SHA1, "aes-128-ctr-hmac-sha-1", 16, 12) \
_ (AES_192_CTR, SHA1, "aes-192-ctr-hmac-sha-1", 24, 12) \
_ (AES_256_CTR, SHA1, "aes-256-ctr-hmac-sha-1", 32, 12)
/* OP_TYPE, PRETTY_NAME — direction of an async crypto operation */
#define foreach_crypto_async_op_type \
_(ENCRYPT, "async-encrypt") \
_(DECRYPT, "async-decrypt")
141 VNET_CRYPTO_KEY_OP_ADD,
142 VNET_CRYPTO_KEY_OP_DEL,
143 VNET_CRYPTO_KEY_OP_MODIFY,
144 } vnet_crypto_key_op_t;
148 #define _(n, s) VNET_CRYPTO_OP_STATUS_##n,
149 foreach_crypto_op_status
151 VNET_CRYPTO_OP_N_STATUS,
152 } vnet_crypto_op_status_t;
156 VNET_CRYPTO_ALG_NONE = 0,
157 #define _(n, s, l) VNET_CRYPTO_ALG_##n,
158 foreach_crypto_cipher_alg foreach_crypto_aead_alg
160 #define _(n, s) VNET_CRYPTO_ALG_HMAC_##n,
161 foreach_crypto_hmac_alg
163 #define _(n, s) VNET_CRYPTO_ALG_HASH_##n,
164 foreach_crypto_hash_alg
171 #define _(n, s) VNET_CRYPTO_ASYNC_OP_TYPE_##n,
172 foreach_crypto_async_op_type
174 VNET_CRYPTO_ASYNC_OP_N_TYPES,
175 } vnet_crypto_async_op_type_t;
179 VNET_CRYPTO_ASYNC_ALG_NONE = 0,
180 #define _(n, s, k, t, a) \
181 VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a,
182 foreach_crypto_aead_async_alg
184 #define _(c, h, s, k ,d) \
185 VNET_CRYPTO_ALG_##c##_##h##_TAG##d,
186 foreach_crypto_link_async_alg
188 VNET_CRYPTO_N_ASYNC_ALGS,
189 } vnet_crypto_async_alg_t;
193 VNET_CRYPTO_ASYNC_OP_NONE = 0,
194 #define _(n, s, k, t, a) \
195 VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
196 VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,
197 foreach_crypto_aead_async_alg
199 #define _(c, h, s, k ,d) \
200 VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
201 VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,
202 foreach_crypto_link_async_alg
204 VNET_CRYPTO_ASYNC_OP_N_IDS,
205 } vnet_crypto_async_op_id_t;
214 vnet_crypto_alg_t alg:8;
220 vnet_crypto_async_alg_t async_alg:8;
/* 'data' key: holds raw key material for a single algorithm */
#define VNET_CRYPTO_KEY_TYPE_DATA 0
/* 'link' key: combines an existing crypto key and integrity key for
   linked (cipher + integ) algs — see vnet_crypto_key_add_linked () */
#define VNET_CRYPTO_KEY_TYPE_LINK 1
230 VNET_CRYPTO_OP_NONE = 0,
231 #define _(n, s, l) VNET_CRYPTO_OP_##n##_ENC, VNET_CRYPTO_OP_##n##_DEC,
232 foreach_crypto_cipher_alg foreach_crypto_aead_alg
234 #define _(n, s) VNET_CRYPTO_OP_##n##_HMAC,
235 foreach_crypto_hmac_alg
237 #define _(n, s) VNET_CRYPTO_OP_##n##_HASH,
238 foreach_crypto_hash_alg
240 VNET_CRYPTO_N_OP_IDS,
241 } vnet_crypto_op_id_t;
248 } crypto_op_class_type_t;
253 vnet_crypto_op_id_t op_by_type[VNET_CRYPTO_OP_N_TYPES];
254 } vnet_crypto_alg_data_t;
261 } vnet_crypto_op_chunk_t;
265 CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
267 vnet_crypto_op_id_t op:16;
268 vnet_crypto_op_status_t status:8;
/* NOTE(review): presumably requests digest verification rather than
   generation for HMAC ops — confirm against engine implementations */
#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK (1 << 0)
/* operation data is described by a chain of vnet_crypto_op_chunk_t
   entries instead of a single contiguous buffer */
#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS (1 << 1)
288 /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
295 /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
310 STATIC_ASSERT_SIZEOF (vnet_crypto_op_t, CLIB_CACHE_LINE_BYTES);
314 vnet_crypto_op_type_t type;
315 vnet_crypto_alg_t alg;
316 u32 active_engine_index_simple;
317 u32 active_engine_index_chained;
318 } vnet_crypto_op_data_t;
322 vnet_crypto_async_op_type_t type;
323 vnet_crypto_async_alg_t alg;
324 u32 active_engine_index_async;
325 } vnet_crypto_async_op_data_t;
330 vnet_crypto_async_op_id_t op_by_type[VNET_CRYPTO_ASYNC_OP_N_TYPES];
331 } vnet_crypto_async_alg_data_t;
343 u32 crypto_total_length;
344 i16 crypto_start_offset; /* first buffer offset */
345 i16 integ_start_offset;
346 /* adj total_length for integ, e.g.4 bytes for IPSec ESN */
347 i16 integ_length_adj;
348 vnet_crypto_op_status_t status : 8;
349 u8 flags; /**< share same VNET_CRYPTO_OP_FLAG_* values */
350 } vnet_crypto_async_frame_elt_t;
352 /* Assert the size so the compiler will warn us when it changes */
353 STATIC_ASSERT_SIZEOF (vnet_crypto_async_frame_elt_t, 5 * sizeof (u64));
355 typedef enum vnet_crypto_async_frame_state_t_
357 VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED,
358 /* frame waiting to be processed */
359 VNET_CRYPTO_FRAME_STATE_PENDING,
360 VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS,
361 VNET_CRYPTO_FRAME_STATE_SUCCESS,
362 VNET_CRYPTO_FRAME_STATE_ELT_ERROR
363 } __clib_packed vnet_crypto_async_frame_state_t;
367 CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
368 vnet_crypto_async_frame_state_t state;
369 vnet_crypto_async_op_id_t op:8;
371 vnet_crypto_async_frame_elt_t elts[VNET_CRYPTO_FRAME_SIZE];
372 u32 buffer_indices[VNET_CRYPTO_FRAME_SIZE];
373 u16 next_node_index[VNET_CRYPTO_FRAME_SIZE];
374 u32 enqueue_thread_index;
375 } vnet_crypto_async_frame_t;
379 CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
380 vnet_crypto_async_frame_t *frame_pool;
383 } vnet_crypto_thread_t;
385 typedef u32 vnet_crypto_key_index_t;
387 typedef u32 (vnet_crypto_chained_ops_handler_t) (vlib_main_t * vm,
388 vnet_crypto_op_t * ops[],
389 vnet_crypto_op_chunk_t *
392 typedef u32 (vnet_crypto_ops_handler_t) (vlib_main_t * vm,
393 vnet_crypto_op_t * ops[], u32 n_ops);
395 typedef void (vnet_crypto_key_handler_t) (vlib_main_t * vm,
396 vnet_crypto_key_op_t kop,
397 vnet_crypto_key_index_t idx);
399 /** async crypto function handlers **/
401 (vnet_crypto_frame_enqueue_t) (vlib_main_t * vm,
402 vnet_crypto_async_frame_t * frame);
403 typedef vnet_crypto_async_frame_t *
404 (vnet_crypto_frame_dequeue_t) (vlib_main_t * vm, u32 * nb_elts_processed,
405 u32 * enqueue_thread_idx);
408 vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
411 void vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
412 vnet_crypto_op_id_t opt,
413 vnet_crypto_ops_handler_t * oph);
415 void vnet_crypto_register_chained_ops_handler (vlib_main_t * vm,
417 vnet_crypto_op_id_t opt,
418 vnet_crypto_chained_ops_handler_t
421 void vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
422 vnet_crypto_op_id_t opt,
423 vnet_crypto_ops_handler_t * fn,
424 vnet_crypto_chained_ops_handler_t *
427 void vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
428 vnet_crypto_key_handler_t * keyh);
430 /** async crypto register functions */
431 u32 vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name);
434 vnet_crypto_register_enqueue_handler (vlib_main_t *vm, u32 engine_index,
435 vnet_crypto_async_op_id_t opt,
436 vnet_crypto_frame_enqueue_t *enq_fn);
439 vnet_crypto_register_dequeue_handler (vlib_main_t *vm, u32 engine_index,
440 vnet_crypto_frame_dequeue_t *deq_fn);
447 vnet_crypto_key_handler_t *key_op_handler;
448 vnet_crypto_ops_handler_t *ops_handlers[VNET_CRYPTO_N_OP_IDS];
449 vnet_crypto_chained_ops_handler_t
450 * chained_ops_handlers[VNET_CRYPTO_N_OP_IDS];
451 vnet_crypto_frame_enqueue_t *enqueue_handlers[VNET_CRYPTO_ASYNC_OP_N_IDS];
452 vnet_crypto_frame_dequeue_t *dequeue_handler;
453 } vnet_crypto_engine_t;
459 } vnet_crypto_async_next_node_t;
463 vnet_crypto_alg_data_t *algs;
464 vnet_crypto_thread_t *threads;
465 vnet_crypto_ops_handler_t **ops_handlers;
466 vnet_crypto_chained_ops_handler_t **chained_ops_handlers;
467 vnet_crypto_frame_enqueue_t **enqueue_handlers;
468 vnet_crypto_frame_dequeue_t **dequeue_handlers;
469 vnet_crypto_op_data_t opt_data[VNET_CRYPTO_N_OP_IDS];
470 vnet_crypto_async_op_data_t async_opt_data[VNET_CRYPTO_ASYNC_OP_N_IDS];
471 vnet_crypto_engine_t *engines;
472 vnet_crypto_key_t *keys;
473 uword *engine_index_by_name;
474 uword *alg_index_by_name;
475 uword *async_alg_index_by_name;
476 vnet_crypto_async_alg_data_t *async_algs;
477 vnet_crypto_async_next_node_t *next_nodes;
478 u32 crypto_node_index;
479 } vnet_crypto_main_t;
481 extern vnet_crypto_main_t crypto_main;
483 u32 vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
484 vnet_crypto_op_chunk_t * chunks,
486 u32 vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
489 void vnet_crypto_set_async_dispatch (u8 mode, u8 adaptive);
490 int vnet_crypto_set_handler2 (char *ops_handler_name, char *engine,
491 crypto_op_class_type_t oct);
492 int vnet_crypto_is_set_handler (vnet_crypto_alg_t alg);
494 u32 vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg,
495 u8 * data, u16 length);
496 void vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index);
497 void vnet_crypto_key_update (vlib_main_t *vm, vnet_crypto_key_index_t index);
500 * Use 2 created keys to generate new key for linked algs (cipher + integ)
501 * The returned key index is to be used for linked alg only.
503 u32 vnet_crypto_key_add_linked (vlib_main_t * vm,
504 vnet_crypto_key_index_t index_crypto,
505 vnet_crypto_key_index_t index_integ);
507 int vnet_crypto_set_async_handler2 (char *alg_name, char *engine);
509 int vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t opt);
511 vnet_crypto_async_alg_t vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
512 vnet_crypto_alg_t integ_alg);
514 format_function_t format_vnet_crypto_alg;
515 format_function_t format_vnet_crypto_engine;
516 format_function_t format_vnet_crypto_op;
517 format_function_t format_vnet_crypto_op_type;
518 format_function_t format_vnet_crypto_op_status;
519 unformat_function_t unformat_vnet_crypto_alg;
521 format_function_t format_vnet_crypto_async_op;
522 format_function_t format_vnet_crypto_async_alg;
523 format_function_t format_vnet_crypto_async_op_type;
525 static_always_inline void
526 vnet_crypto_op_init (vnet_crypto_op_t * op, vnet_crypto_op_id_t type)
529 clib_memset (op, 0xfe, sizeof (*op));
536 static_always_inline vnet_crypto_op_type_t
537 vnet_crypto_get_op_type (vnet_crypto_op_id_t id)
539 vnet_crypto_main_t *cm = &crypto_main;
540 ASSERT (id < VNET_CRYPTO_N_OP_IDS);
541 vnet_crypto_op_data_t *od = cm->opt_data + id;
545 static_always_inline vnet_crypto_key_t *
546 vnet_crypto_get_key (vnet_crypto_key_index_t index)
548 vnet_crypto_main_t *cm = &crypto_main;
549 return vec_elt_at_index (cm->keys, index);
552 static_always_inline int
553 vnet_crypto_set_handler (char *alg_name, char *engine)
555 return vnet_crypto_set_handler2 (alg_name, engine, CRYPTO_OP_BOTH);
558 /** async crypto inline functions **/
560 static_always_inline vnet_crypto_async_frame_t *
561 vnet_crypto_async_get_frame (vlib_main_t * vm, vnet_crypto_async_op_id_t opt)
563 vnet_crypto_main_t *cm = &crypto_main;
564 vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
565 vnet_crypto_async_frame_t *f = NULL;
567 if (PREDICT_TRUE (pool_free_elts (ct->frame_pool)))
569 pool_get_aligned (ct->frame_pool, f, CLIB_CACHE_LINE_BYTES);
571 clib_memset (f, 0xfe, sizeof (*f));
573 f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
581 static_always_inline void
582 vnet_crypto_async_free_frame (vlib_main_t * vm,
583 vnet_crypto_async_frame_t * frame)
585 vnet_crypto_main_t *cm = &crypto_main;
586 vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
587 pool_put (ct->frame_pool, frame);
590 static_always_inline int
591 vnet_crypto_async_submit_open_frame (vlib_main_t * vm,
592 vnet_crypto_async_frame_t * frame)
594 vnet_crypto_main_t *cm = &crypto_main;
595 vlib_thread_main_t *tm = vlib_get_thread_main ();
599 frame->state = VNET_CRYPTO_FRAME_STATE_PENDING;
600 frame->enqueue_thread_index = vm->thread_index;
602 if (PREDICT_FALSE (cm->enqueue_handlers == NULL))
604 frame->state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
608 int ret = (cm->enqueue_handlers[frame->op]) (vm, frame);
610 if (PREDICT_TRUE (ret == 0))
612 n = vlib_get_node (vm, cm->crypto_node_index);
613 if (n->state == VLIB_NODE_STATE_INTERRUPT)
615 for (i = 0; i < tm->n_vlib_mains; i++)
616 vlib_node_set_interrupt_pending (vlib_get_main_by_index (i),
617 cm->crypto_node_index);
622 frame->state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
628 static_always_inline void
629 vnet_crypto_async_add_to_frame (vlib_main_t *vm, vnet_crypto_async_frame_t *f,
630 u32 key_index, u32 crypto_len,
631 i16 integ_len_adj, i16 crypto_start_offset,
632 i16 integ_start_offset, u32 buffer_index,
633 u16 next_node, u8 *iv, u8 *tag, u8 *aad,
636 vnet_crypto_async_frame_elt_t *fe;
639 ASSERT (f->n_elts < VNET_CRYPTO_FRAME_SIZE);
642 fe = &f->elts[index];
644 fe->key_index = key_index;
645 fe->crypto_total_length = crypto_len;
646 fe->crypto_start_offset = crypto_start_offset;
647 fe->integ_start_offset = integ_start_offset;
648 fe->integ_length_adj = integ_len_adj;
653 f->buffer_indices[index] = buffer_index;
654 f->next_node_index[index] = next_node;
657 static_always_inline void
658 vnet_crypto_async_reset_frame (vnet_crypto_async_frame_t * f)
660 vnet_crypto_async_op_id_t opt;
662 ASSERT ((f->state == VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED
663 || f->state == VNET_CRYPTO_FRAME_STATE_ELT_ERROR));
666 clib_memset (f, 0xfe, sizeof (*f));
667 f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
672 static_always_inline u8
673 vnet_crypto_async_frame_is_full (const vnet_crypto_async_frame_t *f)
675 return (f->n_elts == VNET_CRYPTO_FRAME_SIZE);
678 #endif /* included_vnet_crypto_crypto_h */
681 * fd.io coding-style-patch-verification: ON
684 * eval: (c-set-style "gnu")