/*
 * Copyright (c) 2020 Intel and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <vlib/vlib.h>
#include <vnet/plugin/plugin.h>
#include <vpp/app/version.h>

#include "crypto_sw_scheduler.h"
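
/*
 * Enable or disable crypto processing on a given worker thread.
 * At least one thread must keep crypto enabled, so a request that
 * would disable the last remaining crypto worker is rejected.
 */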
int
crypto_sw_scheduler_set_worker_crypto (u32 worker_idx, u8 enabled)
{
  crypto_sw_scheduler_main_t *cm = &crypto_sw_scheduler_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  crypto_sw_scheduler_per_thread_data_t *ptd = 0;
  u32 count = 0, i = vlib_num_workers () > 0;
  if (worker_idx >= vlib_num_workers ())
    {
      return VNET_API_ERROR_INVALID_VALUE;
    }
  for (; i < tm->n_vlib_mains; i++)
    {
      ptd = cm->per_thread_data + i;
      count += ptd->self_crypto_enabled;
    }
  if (enabled || count > 1)
    {
      cm->per_thread_data[vlib_get_worker_thread_index
			  (worker_idx)].self_crypto_enabled = enabled;
    }
  else	/* cannot disable all crypto workers */
    {
      return VNET_API_ERROR_INVALID_VALUE_2;
    }

  return 0;
}
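
/*
 * Key handler: mirror vnet crypto key add/delete events into the
 * scheduler's local key vector. Only linked keys (paired crypto and
 * integrity keys) are tracked here; on delete both indices are
 * invalidated.
 */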
static void
crypto_sw_scheduler_key_handler (vlib_main_t * vm, vnet_crypto_key_op_t kop,
				 vnet_crypto_key_index_t idx)
{
  crypto_sw_scheduler_main_t *cm = &crypto_sw_scheduler_main;
  vnet_crypto_key_t *key = vnet_crypto_get_key (idx);

  vec_validate (cm->keys, idx);
  if (key->type == VNET_CRYPTO_KEY_TYPE_LINK)
    {
      if (kop == VNET_CRYPTO_KEY_OP_DEL)
	{
	  cm->keys[idx].index_crypto = UINT32_MAX;
	  cm->keys[idx].index_integ = UINT32_MAX;
	}
      else
	{
	  cm->keys[idx] = *key;
	}
    }
}
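
/*
 * Enqueue a frame on this thread's ring for the frame's op type. A
 * non-empty slot at the ring head means the ring is full: all elements
 * are marked failed and the frame is rejected. Otherwise the frame is
 * published behind a store barrier so consumers never observe the new
 * head before the job pointer itself is visible.
 */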
static int
crypto_sw_scheduler_frame_enqueue (vlib_main_t * vm,
				   vnet_crypto_async_frame_t * frame)
{
  crypto_sw_scheduler_main_t *cm = &crypto_sw_scheduler_main;
  crypto_sw_scheduler_per_thread_data_t *ptd
    = vec_elt_at_index (cm->per_thread_data, vm->thread_index);
  crypto_sw_scheduler_queue_t *q = ptd->queues[frame->op];
  u32 head = q->head;
  if (q->jobs[head & CRYPTO_SW_SCHEDULER_QUEUE_MASK])
    {
      u32 n_elts = frame->n_elts, i;
      for (i = 0; i < n_elts; i++)
	frame->elts[i].status = VNET_CRYPTO_OP_STATUS_FAIL_ENGINE_ERR;
      frame->state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
      return -1;
    }
  frame->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
  q->jobs[head & CRYPTO_SW_SCHEDULER_QUEUE_MASK] = frame;
  head += 1;
  CLIB_MEMORY_STORE_BARRIER ();
  q->head = head;
  return 0;
}
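
/*
 * Scan the ring from tail to head and claim the first PENDING frame by
 * atomically moving it to WORK_IN_PROGRESS. The compare-and-swap lets
 * any number of threads poll the same queue without ever processing a
 * frame twice.
 */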
static_always_inline vnet_crypto_async_frame_t *
crypto_sw_scheduler_get_pending_frame (crypto_sw_scheduler_queue_t * q)
{
  vnet_crypto_async_frame_t *f;
  u32 i;
  u32 tail = q->tail;
  u32 head = q->head;
  for (i = tail; i < head; i++)
    {
      f = q->jobs[i & CRYPTO_SW_SCHEDULER_QUEUE_MASK];
      if (!f)
	continue;
      if (clib_atomic_bool_cmp_and_swap
	  (&f->state, VNET_CRYPTO_FRAME_STATE_PENDING,
	   VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS))
	return f;
    }
  return NULL;
}
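
/*
 * Pop the frame at the ring tail once its state is at or beyond
 * SUCCESS (i.e. fully processed, successfully or with element errors).
 * Only the queue owner advances the tail, behind a store barrier.
 */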
static_always_inline vnet_crypto_async_frame_t *
crypto_sw_scheduler_get_completed_frame (crypto_sw_scheduler_queue_t * q)
{
  vnet_crypto_async_frame_t *f = 0;
  if (q->jobs[q->tail & CRYPTO_SW_SCHEDULER_QUEUE_MASK]
      && q->jobs[q->tail & CRYPTO_SW_SCHEDULER_QUEUE_MASK]->state
      >= VNET_CRYPTO_FRAME_STATE_SUCCESS)
    {
      u32 tail = q->tail;
      CLIB_MEMORY_STORE_BARRIER ();
      q->tail++;
      f = q->jobs[tail & CRYPTO_SW_SCHEDULER_QUEUE_MASK];
      q->jobs[tail & CRYPTO_SW_SCHEDULER_QUEUE_MASK] = 0;
    }
  return f;
}
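
/*
 * Walk a chained vlib buffer and build a scatter-gather chunk vector
 * covering [offset, offset + len) for the given op. Chunks are
 * appended to the per-thread chunk vector; the op records the first
 * chunk index and the chunk count.
 */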
static_always_inline void
cryptodev_sw_scheduler_sgl (vlib_main_t * vm,
			    crypto_sw_scheduler_per_thread_data_t * ptd,
			    vlib_buffer_t * b, vnet_crypto_op_t * op,
			    i32 offset, i32 len)
{
  vnet_crypto_op_chunk_t *ch;
  vlib_buffer_t *nb = b;
  u32 n_chunks = 0;
  u32 chunk_index = vec_len (ptd->chunks);

  op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
  while (len)
    {
      if (nb->current_data + nb->current_length > offset)
	{
	  vec_add2 (ptd->chunks, ch, 1);
	  ch->src = ch->dst = nb->data + offset;
	  ch->len
	    = clib_min (nb->current_data + nb->current_length - offset, len);
	  len -= ch->len;
	  offset = 0;
	  n_chunks++;
	}
      else
	offset -= nb->current_data + nb->current_length;

      if (nb->flags & VLIB_BUFFER_NEXT_PRESENT)
	nb = vlib_get_buffer (vm, nb->next_buffer);
      else
	break;
    }
  ASSERT (offset == 0 && len == 0);
  op->chunk_index = chunk_index;
  op->n_chunks = n_chunks;
}
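
/*
 * Translate one async frame element into a synchronous AEAD op.
 * Chained buffers go on the chained-ops vector with a scatter-gather
 * list; single buffers use a flat src/dst pointer.
 */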
static_always_inline void
crypto_sw_scheduler_convert_aead (vlib_main_t * vm,
				  crypto_sw_scheduler_per_thread_data_t * ptd,
				  vnet_crypto_async_frame_elt_t * fe,
				  u32 index, u32 bi,
				  vnet_crypto_op_id_t op_id, u16 aad_len,
				  u8 tag_len)
{
  vlib_buffer_t *b = vlib_get_buffer (vm, bi);
  vnet_crypto_op_t *op = 0;

  if (fe->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
    {
      vec_add2 (ptd->chained_crypto_ops, op, 1);
      cryptodev_sw_scheduler_sgl (vm, ptd, b, op, fe->crypto_start_offset,
				  fe->crypto_total_length);
    }
  else
    {
      vec_add2 (ptd->crypto_ops, op, 1);
      op->src = op->dst = b->data + fe->crypto_start_offset;
      op->len = fe->crypto_total_length;
    }

  op->op = op_id;
  op->tag = fe->tag;
  op->flags = fe->flags;
  op->key_index = fe->key_index;
  op->iv = fe->iv;
  op->aad = fe->aad;
  op->aad_len = aad_len;
  op->tag_len = tag_len;
  op->user_data = index;
}
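
/*
 * Translate one async frame element of a linked algorithm
 * (cipher + HMAC) into a pair of synchronous ops: one crypto op and
 * one integrity op sharing the element's user_data index. Encrypt
 * requests get a fresh IV; decrypt requests verify the HMAC digest.
 */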
static_always_inline void
crypto_sw_scheduler_convert_link_crypto (vlib_main_t * vm,
					 crypto_sw_scheduler_per_thread_data_t
					 * ptd, vnet_crypto_key_t * key,
					 vnet_crypto_async_frame_elt_t * fe,
					 u32 index, u32 bi,
					 vnet_crypto_op_id_t crypto_op_id,
					 vnet_crypto_op_id_t integ_op_id,
					 u32 digest_len, u8 is_enc)
{
  vlib_buffer_t *b = vlib_get_buffer (vm, bi);
  vnet_crypto_op_t *crypto_op = 0, *integ_op = 0;

  if (fe->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
    {
      vec_add2 (ptd->chained_crypto_ops, crypto_op, 1);
      vec_add2 (ptd->chained_integ_ops, integ_op, 1);
      cryptodev_sw_scheduler_sgl (vm, ptd, b, crypto_op,
				  fe->crypto_start_offset,
				  fe->crypto_total_length);
      cryptodev_sw_scheduler_sgl (vm, ptd, b, integ_op,
				  fe->integ_start_offset,
				  fe->crypto_total_length +
				  fe->integ_length_adj);
    }
  else
    {
      vec_add2 (ptd->crypto_ops, crypto_op, 1);
      vec_add2 (ptd->integ_ops, integ_op, 1);
      crypto_op->src = crypto_op->dst = b->data + fe->crypto_start_offset;
      crypto_op->len = fe->crypto_total_length;
      integ_op->src = integ_op->dst = b->data + fe->integ_start_offset;
      integ_op->len = fe->crypto_total_length + fe->integ_length_adj;
    }
  crypto_op->op = crypto_op_id;
  crypto_op->iv = fe->iv;
  crypto_op->key_index = key->index_crypto;
  crypto_op->user_data = 0;
  integ_op->op = integ_op_id;
  integ_op->digest = fe->digest;
  integ_op->digest_len = digest_len;
  integ_op->key_index = key->index_integ;
  if (is_enc)
    crypto_op->flags |= VNET_CRYPTO_OP_FLAG_INIT_IV;
  else
    integ_op->flags |= VNET_CRYPTO_OP_FLAG_HMAC_CHECK;
  crypto_op->user_data = integ_op->user_data = index;
}
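
/*
 * Run a batch of synchronous ops and propagate per-op failures back
 * into the owning frame: a failed op marks its element's status and
 * downgrades the frame state to ELT_ERROR.
 */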
static_always_inline void
process_ops (vlib_main_t * vm, vnet_crypto_async_frame_t * f,
	     vnet_crypto_op_t * ops, u8 * state)
{
  u32 n_fail, n_ops = vec_len (ops);
  vnet_crypto_op_t *op = ops;
  if (n_ops == 0)
    return;

  n_fail = n_ops - vnet_crypto_process_ops (vm, op, n_ops);

  while (n_fail)
    {
      ASSERT (op - ops < n_ops);

      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
	{
	  f->elts[op->user_data].status = op->status;
	  *state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
	  n_fail--;
	}
      op++;
    }
}
static_always_inline void
process_chained_ops (vlib_main_t * vm, vnet_crypto_async_frame_t * f,
		     vnet_crypto_op_t * ops, vnet_crypto_op_chunk_t * chunks,
		     u8 * state)
{
  u32 n_fail, n_ops = vec_len (ops);
  vnet_crypto_op_t *op = ops;
  if (n_ops == 0)
    return;

  n_fail = n_ops - vnet_crypto_process_chained_ops (vm, op, chunks, n_ops);

  while (n_fail)
    {
      ASSERT (op - ops < n_ops);

      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
	{
	  f->elts[op->user_data].status = op->status;
	  *state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
	  n_fail--;
	}
      op++;
    }
}
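
/*
 * Dequeue handler for AEAD ops. If self-crypto is enabled, steal the
 * oldest pending frame from any thread's queue, convert its elements
 * to synchronous ops and process them inline, then return the
 * completed frame (if any) at this thread's queue tail.
 */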
static_always_inline vnet_crypto_async_frame_t *
crypto_sw_scheduler_dequeue_aead (vlib_main_t * vm,
				  vnet_crypto_async_op_id_t async_op_id,
				  vnet_crypto_op_id_t sync_op_id, u8 tag_len,
				  u8 aad_len, u32 * nb_elts_processed,
				  u32 * enqueue_thread_idx)
{
  crypto_sw_scheduler_main_t *cm = &crypto_sw_scheduler_main;
  crypto_sw_scheduler_per_thread_data_t *ptd = 0;
  crypto_sw_scheduler_queue_t *q = 0;
  vnet_crypto_async_frame_t *f = 0;
  vnet_crypto_async_frame_elt_t *fe;
  u32 *bi;
  u32 n_elts;
  int i = 0;
  u8 state = VNET_CRYPTO_FRAME_STATE_SUCCESS;
  if (cm->per_thread_data[vm->thread_index].self_crypto_enabled)
    {
      /* *INDENT-OFF* */
      vec_foreach_index (i, cm->per_thread_data)
      {
	ptd = cm->per_thread_data + i;
	q = ptd->queues[async_op_id];
	f = crypto_sw_scheduler_get_pending_frame (q);
	if (f)
	  break;
      }
      /* *INDENT-ON* */
    }

  ptd = cm->per_thread_data + vm->thread_index;

  if (f)
    {
      *nb_elts_processed = n_elts = f->n_elts;
      fe = f->elts;
      bi = f->buffer_indices;

      vec_reset_length (ptd->crypto_ops);
      vec_reset_length (ptd->chained_crypto_ops);
      vec_reset_length (ptd->chunks);
      while (n_elts--)
	{
	  if (n_elts > 1)
	    CLIB_PREFETCH (fe + 1, CLIB_CACHE_LINE_BYTES, LOAD);

	  crypto_sw_scheduler_convert_aead (vm, ptd, fe, fe - f->elts, bi[0],
					    sync_op_id, aad_len, tag_len);
	  bi++;
	  fe++;
	}
      process_ops (vm, f, ptd->crypto_ops, &state);
      process_chained_ops (vm, f, ptd->chained_crypto_ops, ptd->chunks,
			   &state);
      f->state = state;
      *enqueue_thread_idx = f->enqueue_thread_index;
    }

  return crypto_sw_scheduler_get_completed_frame (ptd->queues[async_op_id]);
}
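
/*
 * Dequeue handler for linked (cipher + HMAC) ops. Same work-stealing
 * scheme as the AEAD path, but the op order depends on direction:
 * encrypt-then-MAC on encryption, MAC-verify-then-decrypt on
 * decryption.
 */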
static_always_inline vnet_crypto_async_frame_t *
crypto_sw_scheduler_dequeue_link (vlib_main_t * vm,
				  vnet_crypto_async_op_id_t async_op_id,
				  vnet_crypto_op_id_t sync_crypto_op_id,
				  vnet_crypto_op_id_t sync_integ_op_id,
				  u16 digest_len, u8 is_enc,
				  u32 * nb_elts_processed,
				  u32 * enqueue_thread_idx)
{
  crypto_sw_scheduler_main_t *cm = &crypto_sw_scheduler_main;
  crypto_sw_scheduler_per_thread_data_t *ptd = 0;
  crypto_sw_scheduler_queue_t *q = 0;
  vnet_crypto_async_frame_t *f = 0;
  vnet_crypto_async_frame_elt_t *fe;
  u32 *bi;
  u32 n_elts;
  int i = 0;
  u8 state = VNET_CRYPTO_FRAME_STATE_SUCCESS;
  if (cm->per_thread_data[vm->thread_index].self_crypto_enabled)
    {
      /* *INDENT-OFF* */
      vec_foreach_index (i, cm->per_thread_data)
      {
	ptd = cm->per_thread_data + i;
	q = ptd->queues[async_op_id];
	f = crypto_sw_scheduler_get_pending_frame (q);
	if (f)
	  break;
      }
      /* *INDENT-ON* */
    }

  ptd = cm->per_thread_data + vm->thread_index;

  if (f)
    {
      vec_reset_length (ptd->crypto_ops);
      vec_reset_length (ptd->integ_ops);
      vec_reset_length (ptd->chained_crypto_ops);
      vec_reset_length (ptd->chained_integ_ops);
      vec_reset_length (ptd->chunks);

      *nb_elts_processed = n_elts = f->n_elts;
      fe = f->elts;
      bi = f->buffer_indices;
      while (n_elts--)
	{
	  if (n_elts > 1)
	    CLIB_PREFETCH (fe + 1, CLIB_CACHE_LINE_BYTES, LOAD);

	  crypto_sw_scheduler_convert_link_crypto (vm, ptd,
						   cm->keys + fe->key_index,
						   fe, fe - f->elts, bi[0],
						   sync_crypto_op_id,
						   sync_integ_op_id,
						   digest_len, is_enc);
	  bi++;
	  fe++;
	}
      if (is_enc)
	{
	  process_ops (vm, f, ptd->crypto_ops, &state);
	  process_chained_ops (vm, f, ptd->chained_crypto_ops, ptd->chunks,
			       &state);
	  process_ops (vm, f, ptd->integ_ops, &state);
	  process_chained_ops (vm, f, ptd->chained_integ_ops, ptd->chunks,
			       &state);
	}
      else
	{
	  process_ops (vm, f, ptd->integ_ops, &state);
	  process_chained_ops (vm, f, ptd->chained_integ_ops, ptd->chunks,
			       &state);
	  process_ops (vm, f, ptd->crypto_ops, &state);
	  process_chained_ops (vm, f, ptd->chained_crypto_ops, ptd->chunks,
			       &state);
	}

      f->state = state;
      *enqueue_thread_idx = f->enqueue_thread_index;
    }

  return crypto_sw_scheduler_get_completed_frame (ptd->queues[async_op_id]);
}
static clib_error_t *
sw_scheduler_set_worker_crypto (vlib_main_t * vm, unformat_input_t * input,
				vlib_cli_command_t * cmd)
{
  unformat_input_t _line_input, *line_input = &_line_input;
  u32 worker_index;
  u8 crypto_enable;
  int rv;

  /* Get a line of input. */
  if (!unformat_user (input, unformat_line_input, line_input))
    return 0;

  while (unformat_check_input (line_input) != UNFORMAT_END_OF_INPUT)
    {
      if (unformat (line_input, "worker %u", &worker_index))
	{
	  if (unformat (line_input, "crypto"))
	    {
	      if (unformat (line_input, "on"))
		crypto_enable = 1;
	      else if (unformat (line_input, "off"))
		crypto_enable = 0;
	      else
		return (clib_error_return (0, "unknown input '%U'",
					   format_unformat_error,
					   line_input));
	    }
	  else
	    return (clib_error_return (0, "unknown input '%U'",
				       format_unformat_error, line_input));
	}
      else
	return (clib_error_return (0, "unknown input '%U'",
				   format_unformat_error, line_input));
    }
  rv = crypto_sw_scheduler_set_worker_crypto (worker_index, crypto_enable);
  if (rv == VNET_API_ERROR_INVALID_VALUE)
    {
      return (clib_error_return (0, "invalid worker idx: %d", worker_index));
    }
  else if (rv == VNET_API_ERROR_INVALID_VALUE_2)
    {
      return (clib_error_return (0, "cannot disable all crypto workers"));
    }
  return 0;
}
/*?
 * This command sets whether a worker thread performs crypto processing.
 *
 * @cliexpar
 * Example of how to set worker crypto processing off:
 * @cliexstart{set sw_scheduler worker 0 crypto off}
 * @cliexend
 ?*/
VLIB_CLI_COMMAND (cmd_set_sw_scheduler_worker_crypto, static) = {
  .path = "set sw_scheduler",
  .short_help = "set sw_scheduler worker <idx> crypto <on|off>",
  .function = sw_scheduler_set_worker_crypto,
};
static clib_error_t *
sw_scheduler_show_workers (vlib_main_t * vm, unformat_input_t * input,
			   vlib_cli_command_t * cmd)
{
  crypto_sw_scheduler_main_t *cm = &crypto_sw_scheduler_main;
  u32 i;

  vlib_cli_output (vm, "%-7s%-20s%-8s", "ID", "Name", "Crypto");
  for (i = vlib_num_workers () > 0; i < vlib_thread_main.n_vlib_mains; i++)
    {
      vlib_cli_output (vm, "%-7d%-20s%-8s", vlib_get_worker_index (i),
		       (vlib_worker_threads + i)->name,
		       cm->
		       per_thread_data[i].self_crypto_enabled ? "on" : "off");
    }
  return 0;
}
/*?
 * This command displays the sw_scheduler workers and their crypto state.
 *
 * @cliexpar
 * Example of how to show workers:
 * @cliexstart{show sw_scheduler workers}
 * @cliexend
 ?*/
VLIB_CLI_COMMAND (cmd_show_sw_scheduler_workers, static) = {
  .path = "show sw_scheduler workers",
  .short_help = "show sw_scheduler workers",
  .function = sw_scheduler_show_workers,
};
clib_error_t *
sw_scheduler_cli_init (vlib_main_t * vm)
{
  return 0;
}

VLIB_INIT_FUNCTION (sw_scheduler_cli_init);
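
/*
 * Expand one enc/dec dequeue wrapper per AEAD algorithm, tag length
 * and AAD length; each wrapper just binds the async/sync op ids and
 * sizes and calls crypto_sw_scheduler_dequeue_aead.
 */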
#define _(n, s, k, t, a)                                                      \
  static vnet_crypto_async_frame_t                                           \
    *crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_enc (        \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)               \
  {                                                                           \
    return crypto_sw_scheduler_dequeue_aead (                                 \
      vm, VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC,                         \
      VNET_CRYPTO_OP_##n##_ENC, t, a, nb_elts_processed, thread_idx);         \
  }                                                                           \
  static vnet_crypto_async_frame_t                                           \
    *crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_dec (        \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)               \
  {                                                                           \
    return crypto_sw_scheduler_dequeue_aead (                                 \
      vm, VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,                         \
      VNET_CRYPTO_OP_##n##_DEC, t, a, nb_elts_processed, thread_idx);         \
  }
foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d)                                                      \
  static vnet_crypto_async_frame_t                                           \
    *crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_enc (             \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)               \
  {                                                                           \
    return crypto_sw_scheduler_dequeue_link (                                 \
      vm, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC,                            \
      VNET_CRYPTO_OP_##c##_ENC, VNET_CRYPTO_OP_##h##_HMAC, d, 1,              \
      nb_elts_processed, thread_idx);                                         \
  }                                                                           \
  static vnet_crypto_async_frame_t                                           \
    *crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_dec (             \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 *thread_idx)               \
  {                                                                           \
    return crypto_sw_scheduler_dequeue_link (                                 \
      vm, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,                            \
      VNET_CRYPTO_OP_##c##_DEC, VNET_CRYPTO_OP_##h##_HMAC, d, 0,              \
      nb_elts_processed, thread_idx);                                         \
  }
foreach_crypto_link_async_alg
#undef _
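
/*
 * Plugin init: allocate one set of queues per thread, register the
 * engine, its key handler and one enqueue/dequeue handler pair per
 * supported async op.
 */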
crypto_sw_scheduler_main_t crypto_sw_scheduler_main;

clib_error_t *
crypto_sw_scheduler_init (vlib_main_t * vm)
{
  crypto_sw_scheduler_main_t *cm = &crypto_sw_scheduler_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  clib_error_t *error = 0;
  crypto_sw_scheduler_per_thread_data_t *ptd;
  u32 i;

  u32 queue_size = CRYPTO_SW_SCHEDULER_QUEUE_SIZE * sizeof (void *)
    + sizeof (crypto_sw_scheduler_queue_t);
  vec_validate_aligned (cm->per_thread_data, tm->n_vlib_mains - 1,
			CLIB_CACHE_LINE_BYTES);

  vec_foreach (ptd, cm->per_thread_data)
  {
    ptd->self_crypto_enabled = 1;
    for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_IDS; i++)
      {
	crypto_sw_scheduler_queue_t *q
	  = clib_mem_alloc_aligned (queue_size, CLIB_CACHE_LINE_BYTES);
	ptd->queues[i] = q;
	clib_memset_u8 (q, 0, queue_size);
      }
  }
  cm->crypto_engine_index =
    vnet_crypto_register_engine (vm, "sw_scheduler", 100,
				 "SW Scheduler Async Engine");

  vnet_crypto_register_key_handler (vm, cm->crypto_engine_index,
				    crypto_sw_scheduler_key_handler);

  crypto_sw_scheduler_api_init (vm);
#define _(n, s, k, t, a)                                                      \
  vnet_crypto_register_async_handler (                                        \
    vm, cm->crypto_engine_index,                                              \
    VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC,                               \
    crypto_sw_scheduler_frame_enqueue,                                        \
    crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_enc);         \
  vnet_crypto_register_async_handler (                                        \
    vm, cm->crypto_engine_index,                                              \
    VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,                               \
    crypto_sw_scheduler_frame_enqueue,                                        \
    crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_dec);
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d)                                                      \
  vnet_crypto_register_async_handler (                                        \
    vm, cm->crypto_engine_index, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC,     \
    crypto_sw_scheduler_frame_enqueue,                                        \
    crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_enc);              \
  vnet_crypto_register_async_handler (                                        \
    vm, cm->crypto_engine_index, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,     \
    crypto_sw_scheduler_frame_enqueue,                                        \
    crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_dec);
  foreach_crypto_link_async_alg
#undef _
  if (error)
    vec_free (cm->per_thread_data);

  return error;
}
VLIB_INIT_FUNCTION (crypto_sw_scheduler_init) = {
  .runs_after = VLIB_INITS ("vnet_crypto_init"),
};
VLIB_PLUGIN_REGISTER () = {
  .version = VPP_BUILD_VER,
  .description = "SW Scheduler Crypto Async Engine plugin",
};
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */