/*
 * Copyright (c) 2020 Intel and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <vlib/vlib.h>
#include <vnet/plugin/plugin.h>
#include <vpp/app/version.h>

#include "crypto_sw_scheduler.h"
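/*
 * Design overview (inferred from the code below, not authoritative): this
 * engine accepts async crypto frames on any thread and parks them in
 * per-thread software rings, one ring per async op id.  Every worker with
 * self-crypto enabled polls all rings, claims pending frames, converts
 * them to synchronous vnet crypto ops and runs them inline.  Workers can
 * be taken in and out of crypto duty at runtime via the
 * "set sw_scheduler" CLI further down.
 */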
int
crypto_sw_scheduler_set_worker_crypto (u32 worker_idx, u8 enabled)
{
  crypto_sw_scheduler_main_t *cm = &crypto_sw_scheduler_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  crypto_sw_scheduler_per_thread_data_t *ptd = 0;
  u32 count = 0, i = vlib_num_workers () > 0;

  if (worker_idx >= vlib_num_workers ())
    {
      return VNET_API_ERROR_INVALID_VALUE;
    }

  /* count the workers that currently have self-crypto enabled */
  for (; i < tm->n_vlib_mains; i++)
    {
      ptd = cm->per_thread_data + i;
      count += ptd->self_crypto_enabled;
    }

  if (enabled || count > 1)
    {
      cm->per_thread_data[vlib_get_worker_thread_index
			  (worker_idx)].self_crypto_enabled = enabled;
    }
  else				/* cannot disable all crypto workers */
    {
      return VNET_API_ERROR_INVALID_VALUE_2;
    }
  return 0;
}
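/*
 * Key handler: keeps a local copy of linked (cipher + integrity) keys so
 * the dequeue path can resolve one async key index into the two
 * synchronous key indices (index_crypto / index_integ) it needs.
 */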
static void
crypto_sw_scheduler_key_handler (vlib_main_t * vm, vnet_crypto_key_op_t kop,
				 vnet_crypto_key_index_t idx)
{
  crypto_sw_scheduler_main_t *cm = &crypto_sw_scheduler_main;
  vnet_crypto_key_t *key = vnet_crypto_get_key (idx);

  vec_validate (cm->keys, idx);

  if (key->type == VNET_CRYPTO_KEY_TYPE_LINK)
    {
      if (kop == VNET_CRYPTO_KEY_OP_DEL)
	{
	  cm->keys[idx].index_crypto = UINT32_MAX;
	  cm->keys[idx].index_integ = UINT32_MAX;
	}
      else
	{
	  cm->keys[idx] = *key;
	}
    }
}
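/*
 * Enqueue: runs on the thread that owns the frame.  The slot is
 * head & CRYPTO_SW_SCHEDULER_QUEUE_MASK, which assumes the ring size is a
 * power of two.  A full ring fails the whole frame rather than blocking;
 * the store barrier publishes the job before head moves, so pollers never
 * see a head that points past an unwritten slot.
 */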
static int
crypto_sw_scheduler_frame_enqueue (vlib_main_t * vm,
				   vnet_crypto_async_frame_t * frame)
{
  crypto_sw_scheduler_main_t *cm = &crypto_sw_scheduler_main;
  crypto_sw_scheduler_per_thread_data_t *ptd
    = vec_elt_at_index (cm->per_thread_data, vm->thread_index);
  crypto_sw_scheduler_queue_t *q = ptd->queues[frame->op];
  u64 head = q->head;

  /* ring full: the slot at head is still occupied */
  if (q->jobs[head & CRYPTO_SW_SCHEDULER_QUEUE_MASK])
    {
      u32 n_elts = frame->n_elts, i;
      for (i = 0; i < n_elts; i++)
	frame->elts[i].status = VNET_CRYPTO_OP_STATUS_FAIL_ENGINE_ERR;
      return -1;
    }

  q->jobs[head & CRYPTO_SW_SCHEDULER_QUEUE_MASK] = frame;
  head += 1;
  CLIB_MEMORY_STORE_BARRIER ();
  q->head = head;
  return 0;
}
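/*
 * Dequeue side, step 1: scan [tail, head) and claim the first pending
 * frame with a compare-and-swap on its state, so several crypto workers
 * can poll the same ring without handing one frame to two of them.
 */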
static_always_inline vnet_crypto_async_frame_t *
crypto_sw_scheduler_get_pending_frame (crypto_sw_scheduler_queue_t * q)
{
  vnet_crypto_async_frame_t *f;
  u32 i;
  u32 tail = q->tail;
  u32 head = q->head;

  for (i = tail; i < head; i++)
    {
      f = q->jobs[i & CRYPTO_SW_SCHEDULER_QUEUE_MASK];
      if (!f)
	continue;
      if (clib_atomic_bool_cmp_and_swap
	  (&f->state, VNET_CRYPTO_FRAME_STATE_PENDING,
	   VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS))
	{
	  return f;
	}
    }
  return NULL;
}
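/*
 * Dequeue side, step 2: frames must complete in submission order, so only
 * the frame at tail may be handed back, and only once its state says it
 * is finished.  The >= comparison relies on SUCCESS and ELT_ERROR being
 * the highest-valued states in the frame-state enum.
 */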
static_always_inline vnet_crypto_async_frame_t *
crypto_sw_scheduler_get_completed_frame (crypto_sw_scheduler_queue_t * q)
{
  vnet_crypto_async_frame_t *f = 0;
  if (q->jobs[q->tail & CRYPTO_SW_SCHEDULER_QUEUE_MASK]
      && q->jobs[q->tail & CRYPTO_SW_SCHEDULER_QUEUE_MASK]->state
      >= VNET_CRYPTO_FRAME_STATE_SUCCESS)
    {
      u32 tail = q->tail;
      CLIB_MEMORY_STORE_BARRIER ();
      q->tail++;
      f = q->jobs[tail & CRYPTO_SW_SCHEDULER_QUEUE_MASK];
      q->jobs[tail & CRYPTO_SW_SCHEDULER_QUEUE_MASK] = 0;
    }
  return f;
}
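/*
 * Chained-buffer support: walk a vlib buffer chain and emit one
 * vnet_crypto_op_chunk_t per buffer that overlaps [offset, offset + len),
 * clipping the first and last chunk.  In-place operation is assumed
 * (ch->src == ch->dst); the chunks land in the per-thread chunk vector
 * and the op records only its first chunk index and chunk count.
 */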
static_always_inline void
cryptodev_sw_scheduler_sgl (vlib_main_t * vm,
			    crypto_sw_scheduler_per_thread_data_t * ptd,
			    vlib_buffer_t * b, vnet_crypto_op_t * op,
			    i32 offset, i32 len)
{
  vnet_crypto_op_chunk_t *ch;
  vlib_buffer_t *nb = b;
  u32 n_chunks = 0;
  u32 chunk_index = vec_len (ptd->chunks);

  op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;

  while (len)
    {
      if (nb->current_data + nb->current_length > offset)
	{
	  vec_add2 (ptd->chunks, ch, 1);
	  ch->src = ch->dst = nb->data + offset;
	  ch->len
	    = clib_min (nb->current_data + nb->current_length - offset, len);
	  len -= ch->len;
	  offset = 0;
	  n_chunks++;
	  if (!len)
	    break;
	}
      else
	offset -= nb->current_data + nb->current_length;

      if (nb->flags & VLIB_BUFFER_NEXT_PRESENT)
	nb = vlib_get_buffer (vm, nb->next_buffer);
      else
	break;
    }

  ASSERT (offset == 0 && len == 0);
  op->chunk_index = chunk_index;
  op->n_chunks = n_chunks;
}
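/*
 * Convert one async AEAD frame element into a synchronous vnet crypto op.
 * Chained buffers go to the chained_crypto_ops vector, flat buffers to
 * crypto_ops; user_data records the element index so a failure can be
 * reported back against the right frame element later.
 */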
static_always_inline void
crypto_sw_scheduler_convert_aead (vlib_main_t * vm,
				  crypto_sw_scheduler_per_thread_data_t * ptd,
				  vnet_crypto_async_frame_elt_t * fe,
				  u32 index, u32 bi,
				  vnet_crypto_op_id_t op_id, u16 aad_len,
				  u8 tag_len)
{
  vlib_buffer_t *b = vlib_get_buffer (vm, bi);
  vnet_crypto_op_t *op = 0;

  if (fe->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
    {
      vec_add2 (ptd->chained_crypto_ops, op, 1);
      cryptodev_sw_scheduler_sgl (vm, ptd, b, op, fe->crypto_start_offset,
				  fe->crypto_total_length);
    }
  else
    {
      vec_add2 (ptd->crypto_ops, op, 1);
      op->src = op->dst = b->data + fe->crypto_start_offset;
      op->len = fe->crypto_total_length;
    }

  op->op = op_id;
  op->tag = fe->tag;
  op->flags = fe->flags;
  op->key_index = fe->key_index;
  op->iv = fe->iv;
  op->aad = fe->aad;
  op->aad_len = aad_len;
  op->tag_len = tag_len;
  op->user_data = index;
}
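/*
 * Linked (cipher + HMAC) conversion: every element becomes two sync ops,
 * a cipher op keyed by key->index_crypto and an integrity op keyed by
 * key->index_integ.  On encrypt the IV is initialized; on decrypt the
 * HMAC is verified against the received digest.
 */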
static_always_inline void
crypto_sw_scheduler_convert_link_crypto (vlib_main_t * vm,
					 crypto_sw_scheduler_per_thread_data_t
					 * ptd, vnet_crypto_key_t * key,
					 vnet_crypto_async_frame_elt_t * fe,
					 u32 index, u32 bi,
					 vnet_crypto_op_id_t crypto_op_id,
					 vnet_crypto_op_id_t integ_op_id,
					 u32 digest_len, u8 is_enc)
{
  vlib_buffer_t *b = vlib_get_buffer (vm, bi);
  vnet_crypto_op_t *crypto_op = 0, *integ_op = 0;

  if (fe->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
    {
      vec_add2 (ptd->chained_crypto_ops, crypto_op, 1);
      vec_add2 (ptd->chained_integ_ops, integ_op, 1);
      cryptodev_sw_scheduler_sgl (vm, ptd, b, crypto_op,
				  fe->crypto_start_offset,
				  fe->crypto_total_length);
      cryptodev_sw_scheduler_sgl (vm, ptd, b, integ_op,
				  fe->integ_start_offset,
				  fe->crypto_total_length +
				  fe->integ_length_adj);
    }
  else
    {
      vec_add2 (ptd->crypto_ops, crypto_op, 1);
      vec_add2 (ptd->integ_ops, integ_op, 1);
      crypto_op->src = crypto_op->dst = b->data + fe->crypto_start_offset;
      crypto_op->len = fe->crypto_total_length;
      integ_op->src = integ_op->dst = b->data + fe->integ_start_offset;
      integ_op->len = fe->crypto_total_length + fe->integ_length_adj;
    }

  crypto_op->op = crypto_op_id;
  crypto_op->iv = fe->iv;
  crypto_op->key_index = key->index_crypto;
  crypto_op->user_data = 0;
  integ_op->op = integ_op_id;
  integ_op->digest = fe->digest;
  integ_op->digest_len = digest_len;
  integ_op->key_index = key->index_integ;
  crypto_op->flags = integ_op->flags = 0;
  if (is_enc)
    crypto_op->flags |= VNET_CRYPTO_OP_FLAG_INIT_IV;
  else
    integ_op->flags |= VNET_CRYPTO_OP_FLAG_HMAC_CHECK;
  crypto_op->user_data = integ_op->user_data = index;
}
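/*
 * Run a batch of converted sync ops and propagate per-element failures
 * back into the frame.  vnet_crypto_process_ops() returns the number of
 * successful ops, so n_fail drives a scan for the ops whose status is not
 * COMPLETED; op->user_data holds the frame element index set during
 * conversion.
 */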
static_always_inline void
process_ops (vlib_main_t * vm, vnet_crypto_async_frame_t * f,
	     vnet_crypto_op_t * ops, u8 * state)
{
  u32 n_fail, n_ops = vec_len (ops);
  vnet_crypto_op_t *op = ops;

  if (n_ops == 0)
    return;

  n_fail = n_ops - vnet_crypto_process_ops (vm, op, n_ops);

  while (n_fail)
    {
      ASSERT (op - ops < n_ops);

      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
	{
	  f->elts[op->user_data].status = op->status;
	  *state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
	  n_fail--;
	}
      op++;
    }
}
static_always_inline void
process_chained_ops (vlib_main_t * vm, vnet_crypto_async_frame_t * f,
		     vnet_crypto_op_t * ops, vnet_crypto_op_chunk_t * chunks,
		     u8 * state)
{
  u32 n_fail, n_ops = vec_len (ops);
  vnet_crypto_op_t *op = ops;

  if (n_ops == 0)
    return;

  n_fail = n_ops - vnet_crypto_process_chained_ops (vm, op, chunks, n_ops);

  while (n_fail)
    {
      ASSERT (op - ops < n_ops);

      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
	{
	  f->elts[op->user_data].status = op->status;
	  *state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
	  n_fail--;
	}
      op++;
    }
}
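/*
 * Dequeue handler for one AEAD algorithm/tag/AAD combination.  If this
 * worker does crypto, it claims at most one pending frame from any
 * thread's queue, converts and processes it inline, then hands back the
 * completed frame (if any) at the tail of its own queue, preserving
 * per-queue ordering.
 */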
static_always_inline vnet_crypto_async_frame_t *
crypto_sw_scheduler_dequeue_aead (vlib_main_t * vm,
				  vnet_crypto_async_op_id_t async_op_id,
				  vnet_crypto_op_id_t sync_op_id, u8 tag_len,
				  u8 aad_len, u32 * nb_elts_processed,
				  u32 * enqueue_thread_idx)
{
  crypto_sw_scheduler_main_t *cm = &crypto_sw_scheduler_main;
  crypto_sw_scheduler_per_thread_data_t *ptd = 0;
  crypto_sw_scheduler_queue_t *q = 0;
  vnet_crypto_async_frame_t *f = 0;
  vnet_crypto_async_frame_elt_t *fe;
  u32 *bi;
  u32 n_elts;
  int i = 0;
  u8 state = VNET_CRYPTO_FRAME_STATE_SUCCESS;

  if (cm->per_thread_data[vm->thread_index].self_crypto_enabled)
    {
      /* help with pending frames from all threads, own queue included */
      vec_foreach_index (i, cm->per_thread_data)
      {
	ptd = cm->per_thread_data + i;
	q = ptd->queues[async_op_id];
	f = crypto_sw_scheduler_get_pending_frame (q);
	if (f)
	  break;
      }
    }

  ptd = cm->per_thread_data + vm->thread_index;

  if (f)
    {
      *nb_elts_processed = n_elts = f->n_elts;
      fe = f->elts;
      bi = f->buffer_indices;

      vec_reset_length (ptd->crypto_ops);
      vec_reset_length (ptd->chained_crypto_ops);
      vec_reset_length (ptd->chunks);

      while (n_elts--)
	{
	  if (n_elts > 1)
	    CLIB_PREFETCH (fe + 1, CLIB_CACHE_LINE_BYTES, LOAD);

	  crypto_sw_scheduler_convert_aead (vm, ptd, fe, fe - f->elts, bi[0],
					    sync_op_id, aad_len, tag_len);
	  bi++;
	  fe++;
	}

      process_ops (vm, f, ptd->crypto_ops, &state);
      process_chained_ops (vm, f, ptd->chained_crypto_ops, ptd->chunks,
			   &state);
      f->state = state;
      *enqueue_thread_idx = f->enqueue_thread_index;
    }

  return crypto_sw_scheduler_get_completed_frame (ptd->queues[async_op_id]);
}
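/*
 * Linked-algorithm variant of the dequeue handler.  Note the processing
 * order: encrypt-then-MAC on the way out (cipher ops first), and
 * verify-then-decrypt on the way in (integrity ops first).
 */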
static_always_inline vnet_crypto_async_frame_t *
crypto_sw_scheduler_dequeue_link (vlib_main_t * vm,
				  vnet_crypto_async_op_id_t async_op_id,
				  vnet_crypto_op_id_t sync_crypto_op_id,
				  vnet_crypto_op_id_t sync_integ_op_id,
				  u16 digest_len, u8 is_enc,
				  u32 * nb_elts_processed,
				  u32 * enqueue_thread_idx)
{
  crypto_sw_scheduler_main_t *cm = &crypto_sw_scheduler_main;
  crypto_sw_scheduler_per_thread_data_t *ptd = 0;
  crypto_sw_scheduler_queue_t *q = 0;
  vnet_crypto_async_frame_t *f = 0;
  vnet_crypto_async_frame_elt_t *fe;
  u32 *bi;
  u32 n_elts;
  int i = 0;
  u8 state = VNET_CRYPTO_FRAME_STATE_SUCCESS;

  if (cm->per_thread_data[vm->thread_index].self_crypto_enabled)
    {
      /* help with pending frames from all threads, own queue included */
      vec_foreach_index (i, cm->per_thread_data)
      {
	ptd = cm->per_thread_data + i;
	q = ptd->queues[async_op_id];
	f = crypto_sw_scheduler_get_pending_frame (q);
	if (f)
	  break;
      }
    }

  ptd = cm->per_thread_data + vm->thread_index;

  if (f)
    {
      vec_reset_length (ptd->crypto_ops);
      vec_reset_length (ptd->integ_ops);
      vec_reset_length (ptd->chained_crypto_ops);
      vec_reset_length (ptd->chained_integ_ops);
      vec_reset_length (ptd->chunks);

      *nb_elts_processed = n_elts = f->n_elts;
      fe = f->elts;
      bi = f->buffer_indices;

      while (n_elts--)
	{
	  if (n_elts > 1)
	    CLIB_PREFETCH (fe + 1, CLIB_CACHE_LINE_BYTES, LOAD);

	  crypto_sw_scheduler_convert_link_crypto (vm, ptd,
						   cm->keys + fe->key_index,
						   fe, fe - f->elts, bi[0],
						   sync_crypto_op_id,
						   sync_integ_op_id,
						   digest_len, is_enc);
	  bi++;
	  fe++;
	}

      if (is_enc)
	{
	  process_ops (vm, f, ptd->crypto_ops, &state);
	  process_chained_ops (vm, f, ptd->chained_crypto_ops, ptd->chunks,
			       &state);
	  process_ops (vm, f, ptd->integ_ops, &state);
	  process_chained_ops (vm, f, ptd->chained_integ_ops, ptd->chunks,
			       &state);
	}
      else
	{
	  process_ops (vm, f, ptd->integ_ops, &state);
	  process_chained_ops (vm, f, ptd->chained_integ_ops, ptd->chunks,
			       &state);
	  process_ops (vm, f, ptd->crypto_ops, &state);
	  process_chained_ops (vm, f, ptd->chained_crypto_ops, ptd->chunks,
			       &state);
	}

      f->state = state;
      *enqueue_thread_idx = f->enqueue_thread_index;
    }

  return crypto_sw_scheduler_get_completed_frame (ptd->queues[async_op_id]);
}
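/*
 * CLI: "set sw_scheduler worker <idx> crypto <on|off>".  Parses the line
 * and maps the error codes from crypto_sw_scheduler_set_worker_crypto()
 * to human-readable messages.
 */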
static clib_error_t *
sw_scheduler_set_worker_crypto (vlib_main_t * vm, unformat_input_t * input,
				vlib_cli_command_t * cmd)
{
  unformat_input_t _line_input, *line_input = &_line_input;
  u32 worker_index;
  u8 crypto_enable;
  int rv;

  /* Get a line of input. */
  if (!unformat_user (input, unformat_line_input, line_input))
    return 0;

  while (unformat_check_input (line_input) != UNFORMAT_END_OF_INPUT)
    {
      if (unformat (line_input, "worker %u", &worker_index))
	{
	  if (unformat (line_input, "crypto"))
	    {
	      if (unformat (line_input, "on"))
		crypto_enable = 1;
	      else if (unformat (line_input, "off"))
		crypto_enable = 0;
	      else
		return (clib_error_return (0, "unknown input '%U'",
					   format_unformat_error,
					   line_input));
	    }
	  else
	    return (clib_error_return (0, "unknown input '%U'",
				       format_unformat_error, line_input));
	}
      else
	return (clib_error_return (0, "unknown input '%U'",
				   format_unformat_error, line_input));
    }

  rv = crypto_sw_scheduler_set_worker_crypto (worker_index, crypto_enable);
  if (rv == VNET_API_ERROR_INVALID_VALUE)
    {
      return (clib_error_return (0, "invalid worker idx: %d", worker_index));
    }
  else if (rv == VNET_API_ERROR_INVALID_VALUE_2)
    {
      return (clib_error_return (0, "cannot disable all crypto workers"));
    }
  return 0;
}
/*?
 * This command sets whether a worker does crypto processing.
 *
 * @cliexpar
 * Example of how to set worker crypto processing off:
 * @cliexstart{set sw_scheduler worker 0 crypto off}
 * @cliexend
 ?*/
VLIB_CLI_COMMAND (cmd_set_sw_scheduler_worker_crypto, static) = {
  .path = "set sw_scheduler",
  .short_help = "set sw_scheduler worker <idx> crypto <on|off>",
  .function = sw_scheduler_set_worker_crypto,
};
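/*
 * Hypothetical session (worker names and count are illustrative only,
 * the column layout follows the format strings below):
 *
 *   vpp# set sw_scheduler worker 1 crypto off
 *   vpp# show sw_scheduler workers
 *   ID     Name                Crypto
 *   0      vpp_wk_0            on
 *   1      vpp_wk_1            off
 */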
static clib_error_t *
sw_scheduler_show_workers (vlib_main_t * vm, unformat_input_t * input,
			   vlib_cli_command_t * cmd)
{
  crypto_sw_scheduler_main_t *cm = &crypto_sw_scheduler_main;
  u32 i;

  vlib_cli_output (vm, "%-7s%-20s%-8s", "ID", "Name", "Crypto");
  /* thread 0 is the main thread, so worker threads start at index 1 */
  for (i = 1; i < vlib_thread_main.n_vlib_mains; i++)
    {
      vlib_cli_output (vm, "%-7d%-20s%-8s", vlib_get_worker_index (i),
		       (vlib_worker_threads + i)->name,
		       cm->
		       per_thread_data[i].self_crypto_enabled ? "on" : "off");
    }

  return 0;
}
/*?
 * This command displays the sw_scheduler workers.
 *
 * @cliexpar
 * Example of how to show workers:
 * @cliexstart{show sw_scheduler workers}
 * @cliexend
 ?*/
VLIB_CLI_COMMAND (cmd_show_sw_scheduler_workers, static) = {
  .path = "show sw_scheduler workers",
  .short_help = "show sw_scheduler workers",
  .function = sw_scheduler_show_workers,
};
clib_error_t *
sw_scheduler_cli_init (vlib_main_t * vm)
{
  return 0;
}

VLIB_INIT_FUNCTION (sw_scheduler_cli_init);
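/*
 * One dequeue handler per (algorithm, tag length, AAD length) combination
 * is stamped out below.  For example, assuming AES_128_GCM with TAG 16 and
 * AAD 12 is in the list, the macro would expand to a function named
 * crypto_sw_scheduler_frame_dequeue_AES_128_GCM_TAG_16_AAD_12_enc; the
 * actual set comes from foreach_crypto_aead_async_alg in the header.
 */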
#define _(n, s, k, t, a)                                                      \
  static vnet_crypto_async_frame_t                                            \
    *crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_enc (        \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 * thread_idx)              \
  {                                                                           \
    return crypto_sw_scheduler_dequeue_aead (                                 \
      vm, VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC,                         \
      VNET_CRYPTO_OP_##n##_ENC, t, a, nb_elts_processed, thread_idx);         \
  }                                                                           \
  static vnet_crypto_async_frame_t                                            \
    *crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_dec (        \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 * thread_idx)              \
  {                                                                           \
    return crypto_sw_scheduler_dequeue_aead (                                 \
      vm, VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,                         \
      VNET_CRYPTO_OP_##n##_DEC, t, a, nb_elts_processed, thread_idx);         \
  }
foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d)                                                      \
  static vnet_crypto_async_frame_t                                            \
    *crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_enc (             \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 * thread_idx)              \
  {                                                                           \
    return crypto_sw_scheduler_dequeue_link (                                 \
      vm, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC,                            \
      VNET_CRYPTO_OP_##c##_ENC, VNET_CRYPTO_OP_##h##_HMAC, d, 1,              \
      nb_elts_processed, thread_idx);                                         \
  }                                                                           \
  static vnet_crypto_async_frame_t                                            \
    *crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_dec (             \
      vlib_main_t *vm, u32 *nb_elts_processed, u32 * thread_idx)              \
  {                                                                           \
    return crypto_sw_scheduler_dequeue_link (                                 \
      vm, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,                            \
      VNET_CRYPTO_OP_##c##_DEC, VNET_CRYPTO_OP_##h##_HMAC, d, 0,              \
      nb_elts_processed, thread_idx);                                         \
  }
foreach_crypto_link_async_alg
#undef _
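/*
 * Same pattern for the linked algorithms: e.g. c = AES_128_CBC, h = SHA1,
 * d = 12 would yield
 * crypto_sw_scheduler_frame_dequeue_AES_128_CBC_SHA1_TAG12_enc
 * (illustrative; the real list comes from foreach_crypto_link_async_alg).
 */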
crypto_sw_scheduler_main_t crypto_sw_scheduler_main;

clib_error_t *
crypto_sw_scheduler_init (vlib_main_t * vm)
{
  crypto_sw_scheduler_main_t *cm = &crypto_sw_scheduler_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  clib_error_t *error = 0;
  crypto_sw_scheduler_per_thread_data_t *ptd;

  /* one allocation per queue: the ring of job pointers plus the header */
  u32 queue_size = CRYPTO_SW_SCHEDULER_QUEUE_SIZE * sizeof (void *)
    + sizeof (crypto_sw_scheduler_queue_t);

  vec_validate_aligned (cm->per_thread_data, tm->n_vlib_mains - 1,
			CLIB_CACHE_LINE_BYTES);

  vec_foreach (ptd, cm->per_thread_data)
  {
    ptd->self_crypto_enabled = 1;

    u32 i;
    for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_IDS; i++)
      {
	crypto_sw_scheduler_queue_t *q
	  = clib_mem_alloc_aligned (queue_size, CLIB_CACHE_LINE_BYTES);

	ptd->queues[i] = q;
	clib_memset_u8 (q, 0, queue_size);
      }
  }

  cm->crypto_engine_index =
    vnet_crypto_register_engine (vm, "sw_scheduler", 100,
				 "SW Scheduler Async Engine");

  vnet_crypto_register_key_handler (vm, cm->crypto_engine_index,
				    crypto_sw_scheduler_key_handler);

  crypto_sw_scheduler_api_init (vm);
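  /*
   * Register the shared enqueue handler plus the generated dequeue
   * handler for every async op id this engine claims; the priority (100,
   * set above) decides which engine wins when several register for the
   * same op.
   */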
#define _(n, s, k, t, a)                                                      \
  vnet_crypto_register_async_handler (                                        \
    vm, cm->crypto_engine_index,                                              \
    VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC,                               \
    crypto_sw_scheduler_frame_enqueue,                                        \
    crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_enc);         \
  vnet_crypto_register_async_handler (                                        \
    vm, cm->crypto_engine_index,                                              \
    VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC,                               \
    crypto_sw_scheduler_frame_enqueue,                                        \
    crypto_sw_scheduler_frame_dequeue_##n##_TAG_##t##_AAD_##a##_dec);
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d)                                                      \
  vnet_crypto_register_async_handler (                                        \
    vm, cm->crypto_engine_index, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC,     \
    crypto_sw_scheduler_frame_enqueue,                                        \
    crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_enc);              \
  vnet_crypto_register_async_handler (                                        \
    vm, cm->crypto_engine_index, VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC,     \
    crypto_sw_scheduler_frame_enqueue,                                        \
    crypto_sw_scheduler_frame_dequeue_##c##_##h##_TAG##d##_dec);
  foreach_crypto_link_async_alg
#undef _

  if (error)
    vec_free (cm->per_thread_data);

  return error;
}
VLIB_INIT_FUNCTION (crypto_sw_scheduler_init) = {
  .runs_after = VLIB_INITS ("vnet_crypto_init"),
};
VLIB_PLUGIN_REGISTER () = {
  .version = VPP_BUILD_VER,
  .description = "SW Scheduler Crypto Async Engine plugin",
};

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */