2 * Copyright (c) 2017 Intel and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
15 #include <vnet/vnet.h>
16 #include <vnet/ip/ip.h>
17 #include <vnet/api_errno.h>
18 #include <vnet/ipsec/ipsec.h>
19 #include <vlib/node_funcs.h>
22 #include <dpdk/device/dpdk.h>
23 #include <dpdk/buffer.h>
24 #include <dpdk/ipsec/ipsec.h>
/* Global state for the DPDK crypto backend, shared by all DPDK ipsec nodes. */
26 dpdk_crypto_main_t dpdk_crypto_main;
/* Zero initializer used as the template element for vec_validate_init_empty. */
28 #define EMPTY_STRUCT {0}
/* Default crypto-op mempool size, used when startup config gives none. */
29 #define NUM_CRYPTO_MBUFS 16384
/*
 * Build the cipher and auth algorithm descriptor tables.
 * Every algorithm starts with disabled == n_mains; crypto_auto_placement()
 * decrements the counter for each worker that obtains a resource supporting
 * the algorithm, so disabled == 0 later means "usable on every main/worker".
 * NOTE(review): this view has elided lines (gaps in embedded numbering) -
 * per-alg key_len / iv_len / trunc_size assignments are not visible here.
 */
32 algos_init (u32 n_mains)
34   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
37   vec_validate_aligned (dcm->cipher_algs, IPSEC_CRYPTO_N_ALG - 1, 8);
  /* Macro expansion body: name each cipher alg and disable it on all mains. */
41   dcm->cipher_algs[IPSEC_CRYPTO_ALG_##f].name = str; \
42   dcm->cipher_algs[IPSEC_CRYPTO_ALG_##f].disabled = n_mains;
43   foreach_ipsec_crypto_alg
47   /* Minimum boundary for ciphers is 4B, required by ESP */
48   a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_NONE];
49   a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
50   a->alg = RTE_CRYPTO_CIPHER_NULL;
51   a->boundary = 4; /* 1 */
55   a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CBC_128];
56   a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
57   a->alg = RTE_CRYPTO_CIPHER_AES_CBC;
62   a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CBC_192];
63   a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
64   a->alg = RTE_CRYPTO_CIPHER_AES_CBC;
69   a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CBC_256];
70   a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
71   a->alg = RTE_CRYPTO_CIPHER_AES_CBC;
76   a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CTR_128];
77   a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
78   a->alg = RTE_CRYPTO_CIPHER_AES_CTR;
79   a->boundary = 4; /* 1 */
83   a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CTR_192];
84   a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
85   a->alg = RTE_CRYPTO_CIPHER_AES_CTR;
86   a->boundary = 4; /* 1 */
90   a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CTR_256];
91   a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
92   a->alg = RTE_CRYPTO_CIPHER_AES_CTR;
93   a->boundary = 4; /* 1 */
  /* GCM is an AEAD transform in the DPDK cryptodev API, not a plain cipher. */
97 #define AES_GCM_TYPE RTE_CRYPTO_SYM_XFORM_AEAD
98 #define AES_GCM_ALG RTE_CRYPTO_AEAD_AES_GCM
100   a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_GCM_128];
101   a->type = AES_GCM_TYPE;
102   a->alg = AES_GCM_ALG;
103   a->boundary = 4; /* 1 */
108   a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_GCM_192];
109   a->type = AES_GCM_TYPE;
110   a->alg = AES_GCM_ALG;
111   a->boundary = 4; /* 1 */
116   a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_GCM_256];
117   a->type = AES_GCM_TYPE;
118   a->alg = AES_GCM_ALG;
119   a->boundary = 4; /* 1 */
  /* Same scheme for the integrity (auth) algorithms. */
124   vec_validate (dcm->auth_algs, IPSEC_INTEG_N_ALG - 1);
128   dcm->auth_algs[IPSEC_INTEG_ALG_##f].name = str; \
129   dcm->auth_algs[IPSEC_INTEG_ALG_##f].disabled = n_mains;
130   foreach_ipsec_integ_alg
134   a = &dcm->auth_algs[IPSEC_INTEG_ALG_NONE];
135   a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
136   a->alg = RTE_CRYPTO_AUTH_NULL;
140   a = &dcm->auth_algs[IPSEC_INTEG_ALG_MD5_96];
141   a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
142   a->alg = RTE_CRYPTO_AUTH_MD5_HMAC;
146   a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA1_96];
147   a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
148   a->alg = RTE_CRYPTO_AUTH_SHA1_HMAC;
152   a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA_256_96];
153   a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
154   a->alg = RTE_CRYPTO_AUTH_SHA256_HMAC;
158   a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA_256_128];
159   a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
160   a->alg = RTE_CRYPTO_AUTH_SHA256_HMAC;
164   a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA_384_192];
165   a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
166   a->alg = RTE_CRYPTO_AUTH_SHA384_HMAC;
170   a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA_512_256];
171   a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
172   a->alg = RTE_CRYPTO_AUTH_SHA512_HMAC;
/* Index of a cipher descriptor within dcm->cipher_algs (pointer arithmetic;
 * alg must point into that vector). */
178 cipher_alg_index (const crypto_alg_t * alg)
180   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
182   return (alg - dcm->cipher_algs);
/* Index of an auth descriptor within dcm->auth_algs (pointer arithmetic;
 * alg must point into that vector). */
186 auth_alg_index (const crypto_alg_t * alg)
188   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
190   return (alg - dcm->auth_algs);
/*
 * Map a cryptodev capability entry + key length to our cipher descriptor.
 * Matches either plain-cipher or AEAD capabilities against the table built
 * by algos_init(). Non-symmetric capabilities are rejected up front.
 * NOTE(review): the return statements for match/no-match are on lines elided
 * from this view.
 */
193 static crypto_alg_t *
194 cipher_cap_to_alg (const struct rte_cryptodev_capabilities *cap, u8 key_len)
196   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
199   if (cap->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
203   vec_foreach (alg, dcm->cipher_algs)
205     if ((cap->sym.xform_type == RTE_CRYPTO_SYM_XFORM_CIPHER) &&
206 	(alg->type == RTE_CRYPTO_SYM_XFORM_CIPHER) &&
207 	(cap->sym.cipher.algo == alg->alg) &&
208 	(alg->key_len == key_len))
210     if ((cap->sym.xform_type == RTE_CRYPTO_SYM_XFORM_AEAD) &&
211 	(alg->type == RTE_CRYPTO_SYM_XFORM_AEAD) &&
212 	(cap->sym.aead.algo == alg->alg) &&
213 	(alg->key_len == key_len))
/*
 * Map a cryptodev AUTH capability + digest truncation size to our auth
 * descriptor. Only symmetric AUTH capabilities are considered.
 */
221 static crypto_alg_t *
222 auth_cap_to_alg (const struct rte_cryptodev_capabilities *cap, u8 trunc_size)
224   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
227   if ((cap->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC) ||
228       (cap->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH))
232   vec_foreach (alg, dcm->auth_algs)
234     if ((cap->sym.auth.algo == alg->alg) &&
235 	(alg->trunc_size == trunc_size))
/*
 * Fill an rte_crypto_sym_xform for an AEAD (AES-GCM) SA.
 * The IV lives in the per-op private area (dpdk_op_priv_t.cb); 12-byte IV
 * per ESP GCM usage. AAD is 12 bytes with ESN (SPI + 64-bit seq), 8 without.
 * is_outbound selects ENCRYPT vs DECRYPT.
 */
244 crypto_set_aead_xform (struct rte_crypto_sym_xform *xform,
245 		       ipsec_sa_t * sa, u8 is_outbound)
247   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
250   c = vec_elt_at_index (dcm->cipher_algs, sa->crypto_alg);
252   ASSERT (c->type == RTE_CRYPTO_SYM_XFORM_AEAD);
254   xform->type = RTE_CRYPTO_SYM_XFORM_AEAD;
255   xform->aead.algo = c->alg;
256   xform->aead.key.data = sa->crypto_key.data;
257   xform->aead.key.length = c->key_len;
258   xform->aead.iv.offset =
259     crypto_op_get_priv_offset () + offsetof (dpdk_op_priv_t, cb);
260   xform->aead.iv.length = 12;
261   xform->aead.digest_length = c->trunc_size;
262   xform->aead.aad_length = ipsec_sa_is_set_USE_ESN (sa) ? 12 : 8;
266     xform->aead.op = RTE_CRYPTO_AEAD_OP_ENCRYPT;
268     xform->aead.op = RTE_CRYPTO_AEAD_OP_DECRYPT;
/*
 * Fill an rte_crypto_sym_xform for a plain cipher SA (CBC/CTR/NULL).
 * IV offset again points into the per-op private area; length comes from the
 * algorithm descriptor. is_outbound selects ENCRYPT vs DECRYPT.
 */
272 crypto_set_cipher_xform (struct rte_crypto_sym_xform *xform,
273 			 ipsec_sa_t * sa, u8 is_outbound)
275   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
278   c = vec_elt_at_index (dcm->cipher_algs, sa->crypto_alg);
280   ASSERT (c->type == RTE_CRYPTO_SYM_XFORM_CIPHER);
282   xform->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
283   xform->cipher.algo = c->alg;
284   xform->cipher.key.data = sa->crypto_key.data;
285   xform->cipher.key.length = c->key_len;
286   xform->cipher.iv.offset =
287     crypto_op_get_priv_offset () + offsetof (dpdk_op_priv_t, cb);
288   xform->cipher.iv.length = c->iv_len;
292     xform->cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
294     xform->cipher.op = RTE_CRYPTO_CIPHER_OP_DECRYPT;
/*
 * Fill an rte_crypto_sym_xform for the SA's integrity algorithm.
 * is_outbound selects GENERATE (sign) vs VERIFY.
 */
298 crypto_set_auth_xform (struct rte_crypto_sym_xform *xform,
299 		       ipsec_sa_t * sa, u8 is_outbound)
301   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
304   a = vec_elt_at_index (dcm->auth_algs, sa->integ_alg);
306   ASSERT (a->type == RTE_CRYPTO_SYM_XFORM_AUTH);
308   xform->type = RTE_CRYPTO_SYM_XFORM_AUTH;
309   xform->auth.algo = a->alg;
310   xform->auth.key.data = sa->integ_key.data;
311   xform->auth.key.length = a->key_len;
312   xform->auth.digest_length = a->trunc_size;
316     xform->auth.op = RTE_CRYPTO_AUTH_OP_GENERATE;
318     xform->auth.op = RTE_CRYPTO_AUTH_OP_VERIFY;
/*
 * Create (or reuse) the cryptodev symmetric session for an SA on the
 * resource's NUMA node, then initialize it for the resource's driver.
 *
 * One session header per SA is shared by all workers (cached in
 * data->session_by_sa_index); per-driver private data is initialized once
 * per driver via rte_cryptodev_sym_session_init(). All session-cache
 * mutation happens under data->lockp.
 * NOTE(review): the xform chaining order for outbound vs inbound (cipher
 * before auth / auth before cipher) is decided on lines elided from this
 * view; only the two chain assignments remain visible.
 */
322 create_sym_session (struct rte_cryptodev_sym_session **session,
324 		    crypto_resource_t * res,
325 		    crypto_worker_main_t * cwm, u8 is_outbound)
327   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
328   ipsec_main_t *im = &ipsec_main;
331   struct rte_crypto_sym_xform cipher_xform = { 0 };
332   struct rte_crypto_sym_xform auth_xform = { 0 };
333   struct rte_crypto_sym_xform *xfs;
334   struct rte_cryptodev_sym_session **s;
335   clib_error_t *error = 0;
338   sa = pool_elt_at_index (im->sad, sa_idx);
  /* GCM is AEAD: a single combined xform, no separate auth xform. */
340   if ((sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_128) |
341       (sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_192) |
342       (sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_256))
344       crypto_set_aead_xform (&cipher_xform, sa, is_outbound);
349       crypto_set_cipher_xform (&cipher_xform, sa, is_outbound);
350       crypto_set_auth_xform (&auth_xform, sa, is_outbound);
354 	  cipher_xform.next = &auth_xform;
359 	  auth_xform.next = &cipher_xform;
364   data = vec_elt_at_index (dcm->data, res->numa);
365   clib_spinlock_lock_if_init (&data->lockp);
369    * Multiple worker/threads share the session for an SA
370    * Single session per SA, initialized for each device driver
372   s = (void *) hash_get (data->session_by_sa_index, sa_idx);
376       session[0] = rte_cryptodev_sym_session_create (data->session_h);
379 	  data->session_h_failed += 1;
380 	  error = clib_error_return (0, "failed to create session header");
383       hash_set (data->session_by_sa_index, sa_idx, session[0]);
388   struct rte_mempool **mp;
389   mp = vec_elt_at_index (data->session_drv, res->drv_id);
390   ASSERT (mp[0] != NULL);
393     rte_cryptodev_sym_session_init (res->dev_id, session[0], xfs, mp[0]);
396       data->session_drv_failed[res->drv_id] += 1;
397       error = clib_error_return (0, "failed to init session for drv %u",
402   add_session_by_drv_and_sa_idx (session[0], data, res->drv_id, sa_idx);
405   clib_spinlock_unlock_if_init (&data->lockp);
/* Zero a mempool object and return it to its pool. Used to release driver
 * session private data without needing a dev_id (see session disposal). */
409 static void __attribute__ ((unused)) clear_and_free_obj (void *obj)
411   struct rte_mempool *mp = rte_mempool_from_obj (obj);
413   clib_memset (obj, 0, mp->elt_size);
415   rte_mempool_put (mp, obj);
418 /* This is from rte_cryptodev_pmd.h */
/* Read a driver's private data pointer from a session. Copied locally
 * because the PMD header is not part of the public cryptodev API; the
 * session layout changed in DPDK 19.02, hence the version split. */
420 get_session_private_data (const struct rte_cryptodev_sym_session *sess,
423 #if RTE_VERSION < RTE_VERSION_NUM(19, 2, 0, 0)
424   return sess->sess_private_data[driver_id];
  /* >= 19.02: bounds-check driver_id against the per-session driver array. */
426   if (unlikely (sess->nb_drivers <= driver_id))
429   return sess->sess_data[driver_id].data;
433 /* This is from rte_cryptodev_pmd.h */
/* Counterpart of get_session_private_data(): store a driver's private data
 * pointer into a session, across both pre- and post-19.02 layouts. */
435 set_session_private_data (struct rte_cryptodev_sym_session *sess,
436 			  uint8_t driver_id, void *private_data)
438 #if RTE_VERSION < RTE_VERSION_NUM(19, 2, 0, 0)
439   sess->sess_private_data[driver_id] = private_data;
441   if (unlikely (sess->nb_drivers <= driver_id))
443   sess->sess_data[driver_id].data = private_data;
/*
 * Free sessions whose disposal timestamp is older than session_timeout.
 * v is ordered by timestamp, so iteration stops at the first entry that is
 * still too young; processed entries are then removed from the vector.
 * Each driver's private data is freed directly via clear_and_free_obj()
 * (avoids having to resolve a dev_id per drv_id), then the session header
 * itself is freed if it still belongs to a mempool.
 */
447 static clib_error_t *
448 dpdk_crypto_session_disposal (crypto_session_disposal_t * v, u64 ts)
450   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
451   crypto_session_disposal_t *s;
459     /* ordered vector by timestamp */
460     if (!(s->ts + dcm->session_timeout < ts))
463     vec_foreach_index (drv_id, dcm->drv)
465 	drv_session = get_session_private_data (s->session, drv_id);
470 	 * Custom clear to avoid finding a dev_id for drv_id:
471 	 * ret = rte_cryptodev_sym_session_clear (dev_id, drv_session);
474 	clear_and_free_obj (drv_session);
476 	set_session_private_data (s->session, drv_id, NULL);
479     if (rte_mempool_from_obj(s->session))
481 	ret = rte_cryptodev_sym_session_free (s->session);
  /* Drop the fully-disposed prefix [0, s) from the vector. */
488   vec_delete (v, s - v, 0);
490   vec_reset_length (v);
/*
 * SA add/del hook: on delete, detach the SA's session from every per-numa
 * cache (per-SA hash and per-driver map) under the data lock, then queue the
 * session for deferred disposal; in-flight crypto ops may still reference it,
 * so it is only freed after session_timeout (see
 * dpdk_crypto_session_disposal). The is_add path's visible work is limited
 * in this view.
 */
495 static clib_error_t *
496 add_del_sa_session (u32 sa_index, u8 is_add)
498   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
500   struct rte_cryptodev_sym_session *s;
508   vec_foreach (data, dcm->data)
510     clib_spinlock_lock_if_init (&data->lockp);
511     val = hash_get (data->session_by_sa_index, sa_index);
514 	s = (struct rte_cryptodev_sym_session *) val[0];
515 	vec_foreach_index (drv_id, dcm->drv)
517 	    val = (uword*) get_session_by_drv_and_sa_idx (data, drv_id, sa_index);
519 	      add_session_by_drv_and_sa_idx(NULL, data, drv_id, sa_index);
522 	hash_unset (data->session_by_sa_index, sa_index);
  /* Opportunistically reap already-expired sessions before queueing. */
524     u64 ts = unix_time_now_nsec ();
525     dpdk_crypto_session_disposal (data->session_disposal, ts);
527     crypto_session_disposal_t sd;
531     vec_add1 (data->session_disposal, sd);
533     clib_spinlock_unlock_if_init (&data->lockp);
/*
 * Backend capability check called by the ipsec layer before an SA is
 * accepted. Rejects: (a) integ-alg NONE combined with any crypto-alg other
 * than NONE or AES-GCM (GCM carries its own integrity), and (b) any alg
 * whose descriptor is still marked disabled (no worker got a matching
 * cryptodev resource). Returns a clib error on rejection.
 */
540 static clib_error_t *
541 dpdk_ipsec_check_support (ipsec_sa_t * sa)
543   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
545   if (sa->integ_alg == IPSEC_INTEG_ALG_NONE)
546     switch (sa->crypto_alg)
548       case IPSEC_CRYPTO_ALG_NONE:
549       case IPSEC_CRYPTO_ALG_AES_GCM_128:
550       case IPSEC_CRYPTO_ALG_AES_GCM_192:
551       case IPSEC_CRYPTO_ALG_AES_GCM_256:
554 	return clib_error_return (0, "unsupported integ-alg %U crypto-alg %U",
555 				  format_ipsec_integ_alg, sa->integ_alg,
556 				  format_ipsec_crypto_alg, sa->crypto_alg);
559   /* XXX do we need the NONE check? */
560   if (sa->crypto_alg != IPSEC_CRYPTO_ALG_NONE &&
561       dcm->cipher_algs[sa->crypto_alg].disabled)
562     return clib_error_return (0, "disabled crypto-alg %U",
563 			      format_ipsec_crypto_alg, sa->crypto_alg);
565   /* XXX do we need the NONE check? */
566   if (sa->integ_alg != IPSEC_INTEG_ALG_NONE &&
567       dcm->auth_algs[sa->integ_alg].disabled)
568     return clib_error_return (0, "disabled integ-alg %U",
569 			      format_ipsec_integ_alg, sa->integ_alg);
/*
 * Walk a device's capability array (terminated by OP_TYPE_UNDEFINED) and
 * mark which of our cipher/auth algorithms the device supports, iterating
 * over each capability's full key-size / digest-size range. Each match adds
 * the device's queue-pair resources to the algorithm's resource count; the
 * whole backend is enabled once some algorithm has >= n_mains resources.
 * NOTE(review): the inc == 0 (single-size range) loop handling is on lines
 * elided from this view.
 */
574 crypto_parse_capabilities (crypto_dev_t * dev,
575 			   const struct rte_cryptodev_capabilities *cap,
578   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
582   for (; cap->op != RTE_CRYPTO_OP_TYPE_UNDEFINED; cap++)
584       /* A single capability maps to multiple cipher/auth algorithms */
585       switch (cap->sym.xform_type)
587 	case RTE_CRYPTO_SYM_XFORM_AEAD:
588 	case RTE_CRYPTO_SYM_XFORM_CIPHER:
589 	  inc = cap->sym.cipher.key_size.increment;
591 	  for (len = cap->sym.cipher.key_size.min;
592 	       len <= cap->sym.cipher.key_size.max; len += inc)
594 	      alg = cipher_cap_to_alg (cap, len);
597 	      dev->cipher_support[cipher_alg_index (alg)] = 1;
598 	      alg->resources += vec_len (dev->free_resources);
599 	      /* At least enough resources to support one algo */
600 	      dcm->enabled |= (alg->resources >= n_mains);
603 	case RTE_CRYPTO_SYM_XFORM_AUTH:
604 	  inc = cap->sym.auth.digest_size.increment;
606 	  for (len = cap->sym.auth.digest_size.min;
607 	       len <= cap->sym.auth.digest_size.max; len += inc)
609 	      alg = auth_cap_to_alg (cap, len);
612 	      dev->auth_support[auth_alg_index (alg)] = 1;
613 	      alg->resources += vec_len (dev->free_resources);
614 	      /* At least enough resources to support one algo */
615 	      dcm->enabled |= (alg->resources >= n_mains);
/*
 * Configure a cryptodev: bind it to the given NUMA socket, set up n_qp
 * queue pairs with DPDK_CRYPTO_N_QUEUE_DESC descriptors each, then start
 * the device. Returns a clib error naming the failing step.
 * The queue_pair_setup signature lost its session-mempool argument in DPDK
 * 19.02, hence the RTE_VERSION split.
 */
624 static clib_error_t *
625 crypto_dev_conf (u8 dev, u16 n_qp, u8 numa)
627   struct rte_cryptodev_config dev_conf = { 0 };
628   struct rte_cryptodev_qp_conf qp_conf = { 0 };
633   dev_conf.socket_id = numa;
634   dev_conf.nb_queue_pairs = n_qp;
636   error_str = "failed to configure crypto device %u";
637   ret = rte_cryptodev_configure (dev, &dev_conf);
639     return clib_error_return (0, error_str, dev);
641   error_str = "failed to setup crypto device %u queue pair %u";
642   qp_conf.nb_descriptors = DPDK_CRYPTO_N_QUEUE_DESC;
643   for (qp = 0; qp < n_qp; qp++)
645 #if RTE_VERSION < RTE_VERSION_NUM(19, 2, 0, 0)
646       ret = rte_cryptodev_queue_pair_setup (dev, qp, &qp_conf, numa, NULL);
648       ret = rte_cryptodev_queue_pair_setup (dev, qp, &qp_conf, numa);
651 	return clib_error_return (0, error_str, dev, qp);
654   error_str = "failed to start crypto device %u";
655   if (rte_cryptodev_start (dev))
656     return clib_error_return (0, error_str, dev);
/*
 * Enumerate all DPDK cryptodevs and populate dcm->dev / dcm->drv /
 * dcm->resource. For each usable device (must support symmetric operation
 * chaining): record its name/numa/features/queue-pair count, register its
 * driver, configure+start it, then carve one crypto_resource_t per queue
 * pair. free_resources is filled in reverse so vec_pop hands out the
 * lowest-numbered queue pairs first. Finally the device's capabilities are
 * parsed to flag supported algorithms.
 */
662 crypto_scan_devs (u32 n_mains)
664   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
665   struct rte_cryptodev *cryptodev;
666   struct rte_cryptodev_info info = { 0 };
668   crypto_resource_t *res;
671   u16 max_res_idx, res_idx, j;
674   vec_validate_init_empty (dcm->dev, rte_cryptodev_count () - 1,
675 			   (crypto_dev_t) EMPTY_STRUCT);
677   for (i = 0; i < rte_cryptodev_count (); i++)
679       dev = vec_elt_at_index (dcm->dev, i);
681       cryptodev = &rte_cryptodevs[i];
682       rte_cryptodev_info_get (i, &info);
685       dev->name = cryptodev->data->name;
686       dev->numa = rte_cryptodev_socket_id (i);
687       dev->features = info.feature_flags;
688       dev->max_qp = info.max_nb_queue_pairs;
689       drv_id = info.driver_id;
690       if (drv_id >= vec_len (dcm->drv))
691 	vec_validate_init_empty (dcm->drv, drv_id,
692 				 (crypto_drv_t) EMPTY_STRUCT);
693       vec_elt_at_index (dcm->drv, drv_id)->name = info.driver_name;
694       dev->drv_id = drv_id;
695       vec_add1 (vec_elt_at_index (dcm->drv, drv_id)->devs, i);
  /* ESP needs chained cipher+auth ops; skip devices that can't chain. */
697       if (!(info.feature_flags & RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING))
700       if ((error = crypto_dev_conf (i, dev->max_qp, dev->numa)))
702 	  clib_error_report (error);
706       max_res_idx = dev->max_qp - 1;
708       vec_validate (dev->free_resources, max_res_idx);
710       res_idx = vec_len (dcm->resource);
711       vec_validate_init_empty_aligned (dcm->resource, res_idx + max_res_idx,
712 				       (crypto_resource_t) EMPTY_STRUCT,
713 				       CLIB_CACHE_LINE_BYTES);
715       for (j = 0; j <= max_res_idx; j++)
717 	  vec_elt (dev->free_resources, max_res_idx - j) = res_idx + j;
718 	  res = &dcm->resource[res_idx + j];
720 	  res->drv_id = drv_id;
722 	  res->numa = dev->numa;
723 	  res->thread_idx = (u16) ~ 0;
726       crypto_parse_capabilities (dev, info.capabilities, n_mains);
/*
 * Distribute device queue-pair resources across worker threads.
 * For each device, each worker thread (skipping the main thread when
 * workers exist) that is not already using the device pops one free
 * resource. The worker then claims that resource for every supported
 * algorithm it does not yet have a resource for, decrementing the
 * algorithm's disabled counter each time. If the resource serves no new
 * algorithm it is pushed back; otherwise it moves to used_resources, is
 * bound to the thread, and is appended to the worker's polling list.
 */
731 crypto_auto_placement (void)
733   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
734   crypto_resource_t *res;
735   crypto_worker_main_t *cwm;
737   u32 thread_idx, skip_master;
742   skip_master = vlib_num_workers () > 0;
745   vec_foreach (dev, dcm->dev)
747       vec_foreach_index (thread_idx, dcm->workers_main)
749 	  if (vec_len (dev->free_resources) == 0)
752 	  if (thread_idx < skip_master)
755 	  /* Check thread is not already using the device */
756 	  vec_foreach (idx, dev->used_resources)
757 	    if (dcm->resource[idx[0]].thread_idx == thread_idx)
760 	  cwm = vec_elt_at_index (dcm->workers_main, thread_idx);
763 	  res_idx = vec_pop (dev->free_resources);
765 	  /* Set device only for supported algos */
766 	  for (i = 0; i < IPSEC_CRYPTO_N_ALG; i++)
767 	    if (dev->cipher_support[i] &&
768 		cwm->cipher_resource_idx[i] == (u16) ~0)
770 		dcm->cipher_algs[i].disabled--;
771 		cwm->cipher_resource_idx[i] = res_idx;
775 	  for (i = 0; i < IPSEC_INTEG_N_ALG; i++)
776 	    if (dev->auth_support[i] &&
777 		cwm->auth_resource_idx[i] == (u16) ~0)
779 		dcm->auth_algs[i].disabled--;
780 		cwm->auth_resource_idx[i] = res_idx;
  /* Resource served no new algorithm for this worker: return it. */
786 	      vec_add1 (dev->free_resources, res_idx);
790 	  vec_add1 (dev->used_resources, res_idx);
792 	  res = vec_elt_at_index (dcm->resource, res_idx);
794 	  ASSERT (res->thread_idx == (u16) ~0);
795 	  res->thread_idx = thread_idx;
797 	  /* Add device to vector of polling resources */
798 	  vec_add1 (cwm->resource_idx, res_idx);
/*
 * rte_mempool_create object constructor: pre-initialize each crypto op's
 * invariant fields (session-based symmetric op, NOT_PROCESSED status,
 * physical address, owning mempool) once at pool-creation time so the
 * datapath does not have to.
 */
805 crypto_op_init (struct rte_mempool *mempool,
806 		void *_arg __attribute__ ((unused)),
807 		void *_obj, unsigned i __attribute__ ((unused)))
809   struct rte_crypto_op *op = _obj;
811   op->sess_type = RTE_CRYPTO_OP_WITH_SESSION;
812   op->type = RTE_CRYPTO_OP_TYPE_SYMMETRIC;
813   op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
814   op->phys_addr = rte_mempool_virt2iova (_obj);
815   op->mempool = mempool;
/*
 * Create the per-NUMA crypto-op mempool (idempotent: returns early when the
 * numa node already has one). Pool size comes from startup config, falling
 * back to NUM_CRYPTO_MBUFS; each element is constructed by crypto_op_init.
 * The mempool private area is initialized as the
 * rte_crypto_op_pool_private header that rte_crypto_op_* APIs expect.
 */
818 static clib_error_t *
819 crypto_create_crypto_op_pool (vlib_main_t * vm, u8 numa)
821   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
822   dpdk_config_main_t *conf = &dpdk_config_main;
825   u32 pool_priv_size = sizeof (struct rte_crypto_op_pool_private);
826   struct rte_crypto_op_pool_private *priv;
827   struct rte_mempool *mp;
829   data = vec_elt_at_index (dcm->data, numa);
831   /* Already allocated */
835   pool_name = format (0, "crypto_pool_numa%u%c", numa, 0);
837   if (conf->num_crypto_mbufs == 0)
838     conf->num_crypto_mbufs = NUM_CRYPTO_MBUFS;
840   mp = rte_mempool_create ((char *) pool_name, conf->num_crypto_mbufs,
841 			   crypto_op_len (), 512, pool_priv_size, NULL, NULL,
842 			   crypto_op_init, NULL, numa, 0);
844   vec_free (pool_name);
847     return clib_error_return (0, "failed to create crypto op mempool");
849   /* Initialize mempool private data */
850   priv = rte_mempool_get_priv (mp);
851   priv->priv_size = pool_priv_size;
852   priv->type = RTE_CRYPTO_OP_TYPE_SYMMETRIC;
854   data->crypto_op = mp;
/*
 * Create the per-NUMA session *header* mempool (idempotent). Element size
 * is the cryptodev session-header size. DPDK 19.02 introduced a dedicated
 * (then-experimental) rte_cryptodev_sym_session_pool_create(); older
 * releases use a plain rte_mempool_create().
 */
859 static clib_error_t *
860 crypto_create_session_h_pool (vlib_main_t * vm, u8 numa)
862   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
865   struct rte_mempool *mp;
868   data = vec_elt_at_index (dcm->data, numa);
873   pool_name = format (0, "session_h_pool_numa%u%c", numa, 0);
876   elt_size = rte_cryptodev_sym_get_header_session_size ();
878 #if RTE_VERSION < RTE_VERSION_NUM(19, 2, 0, 0)
879   mp = rte_mempool_create ((char *) pool_name, DPDK_CRYPTO_NB_SESS_OBJS,
880 			   elt_size, 512, 0, NULL, NULL, NULL, NULL, numa, 0);
882   /* XXX Experimental tag in DPDK 19.02 */
883   mp = rte_cryptodev_sym_session_pool_create ((char *) pool_name,
884 					      DPDK_CRYPTO_NB_SESS_OBJS,
885 					      elt_size, 512, 0, numa);
887   vec_free (pool_name);
890     return clib_error_return (0, "failed to create crypto session mempool");
892   data->session_h = mp;
/*
 * Create the per-driver session *private data* mempool on the device's NUMA
 * node (idempotent per drv_id). Element size is the driver-specific private
 * session size queried from the device. Also sizes the per-numa
 * driver+SA session map and initializes the numa data lock.
 */
897 static clib_error_t *
898 crypto_create_session_drv_pool (vlib_main_t * vm, crypto_dev_t * dev)
900   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
903   struct rte_mempool *mp;
907   data = vec_elt_at_index (dcm->data, numa);
909   vec_validate (data->session_drv, dev->drv_id);
910   vec_validate (data->session_drv_failed, dev->drv_id);
911   vec_validate_aligned (data->session_by_drv_id_and_sa_index, 32,
912 			CLIB_CACHE_LINE_BYTES);
914   if (data->session_drv[dev->drv_id])
917   pool_name = format (0, "session_drv%u_pool_numa%u%c", dev->drv_id, numa, 0);
919   elt_size = rte_cryptodev_sym_get_private_session_size (dev->id);
921     rte_mempool_create ((char *) pool_name, DPDK_CRYPTO_NB_SESS_OBJS,
922 			elt_size, 512, 0, NULL, NULL, NULL, NULL, numa, 0);
924   vec_free (pool_name);
927     return clib_error_return (0, "failed to create session drv mempool");
929   data->session_drv[dev->drv_id] = mp;
930   clib_spinlock_init (&data->lockp);
/*
 * For every scanned device, make sure its NUMA node has the three mempools
 * the backend needs: crypto ops, session headers, and per-driver session
 * private data. Each helper is idempotent, so devices sharing a numa node
 * reuse the same pools.
 */
935 static clib_error_t *
936 crypto_create_pools (vlib_main_t * vm)
938   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
939   clib_error_t *error = NULL;
943   vec_foreach (dev, dcm->dev)
945       vec_validate_aligned (dcm->data, dev->numa, CLIB_CACHE_LINE_BYTES);
947       error = crypto_create_crypto_op_pool (vm, dev->numa);
951       error = crypto_create_session_h_pool (vm, dev->numa);
955       error = crypto_create_session_drv_pool (vm, dev);
/*
 * Tear down all backend state when the backend cannot be enabled: free every
 * per-numa mempool (rte_mempool_free accepts NULL), the per-numa locks, and
 * the top-level vectors built by algos_init()/crypto_scan_devs().
 */
965 crypto_disable (void)
967   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
974   vec_foreach (data, dcm->data)
976       rte_mempool_free (data->crypto_op);
977       rte_mempool_free (data->session_h);
979       vec_foreach_index (i, data->session_drv)
980 	rte_mempool_free (data->session_drv[i]);
982       vec_free (data->session_drv);
983       clib_spinlock_free (&data->lockp);
987   vec_free (dcm->data);
988   vec_free (dcm->workers_main);
990   vec_free (dcm->resource);
991   vec_free (dcm->cipher_algs);
992   vec_free (dcm->auth_algs);
/*
 * Backend enable/disable hook: switch the dpdk-crypto-input node between
 * POLLING and DISABLED on every vlib main. The main thread is skipped
 * whenever worker threads exist (skip_master == 1).
 */
995 static clib_error_t *
996 dpdk_ipsec_enable_disable (int is_enable)
998   vlib_main_t *vm = vlib_get_main ();
999   vlib_thread_main_t *tm = vlib_get_thread_main ();
1000   vlib_node_t *node = vlib_get_node_by_name (vm, (u8 *) "dpdk-crypto-input");
1001   u32 skip_master = vlib_num_workers () > 0;
1002   u32 n_mains = tm->n_vlib_mains;
1006   for (i = skip_master; i < n_mains; i++)
1007     vlib_node_set_state (vlib_mains[i], node->index, is_enable != 0 ?
1008 			 VLIB_NODE_STATE_POLLING : VLIB_NODE_STATE_DISABLED);
/*
 * Backend bring-up, run at main-loop entry: build algorithm tables, scan and
 * configure cryptodevs, bail out with a warning (leaving the backend
 * unregistered) when there are not enough resources for every worker, then
 * set up per-worker state, place resources, create mempools, and finally
 * register (and possibly select) the "dpdk backend" ESP backend with the
 * ipsec layer.
 * NOTE(review): elided lines include the crypto_disable() cleanup paths and
 * the add_del_sa_session argument in the registration call - not visible in
 * this view.
 */
1013 static clib_error_t *
1014 dpdk_ipsec_main_init (vlib_main_t * vm)
1016   ipsec_main_t *im = &ipsec_main;
1017   dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
1018   vlib_thread_main_t *tm = vlib_get_thread_main ();
1019   crypto_worker_main_t *cwm;
1020   clib_error_t *error = NULL;
1021   u32 skip_master, n_mains;
1023   n_mains = tm->n_vlib_mains;
1024   skip_master = vlib_num_workers () > 0;
  /* Count only the mains that actually run crypto (exclude main thread
   * when workers exist). */
1026   algos_init (n_mains - skip_master);
1028   crypto_scan_devs (n_mains - skip_master);
1030   if (!(dcm->enabled))
1032       vlib_log_warn (dpdk_main.log_default,
1033 		     "not enough DPDK crypto resources");
  /* 10 seconds (nanoseconds) before a disposed session is really freed. */
1038   dcm->session_timeout = 10e9;
1040   vec_validate_init_empty_aligned (dcm->workers_main, n_mains - 1,
1041 				   (crypto_worker_main_t) EMPTY_STRUCT,
1042 				   CLIB_CACHE_LINE_BYTES);
1045   vec_foreach (cwm, dcm->workers_main)
1047       vec_validate_init_empty_aligned (cwm->ops, VLIB_FRAME_SIZE - 1, 0,
1048 				       CLIB_CACHE_LINE_BYTES);
  /* ~0 == "no resource assigned yet" sentinel, see crypto_auto_placement. */
1049       clib_memset (cwm->cipher_resource_idx, ~0,
1050 		   IPSEC_CRYPTO_N_ALG * sizeof(*cwm->cipher_resource_idx));
1051       clib_memset (cwm->auth_resource_idx, ~0,
1052 		   IPSEC_INTEG_N_ALG * sizeof(*cwm->auth_resource_idx));
1056   crypto_auto_placement ();
1058   error = crypto_create_pools (vm);
1061       clib_error_report (error);
1067   u32 idx = ipsec_register_esp_backend (vm, im, "dpdk backend",
1068 					"dpdk-esp4-encrypt",
1069 					"dpdk-esp4-encrypt-tun",
1070 					"dpdk-esp4-decrypt",
1071 					"dpdk-esp4-decrypt",
1072 					"dpdk-esp6-encrypt",
1073 					"dpdk-esp6-encrypt-tun",
1074 					"dpdk-esp6-decrypt",
1075 					"dpdk-esp6-decrypt",
1076 					dpdk_ipsec_check_support,
1078 					dpdk_ipsec_enable_disable);
  /* Become the active ESP backend only if none was selected yet. */
1080   if (im->esp_current_backend == ~0)
1082       rv = ipsec_select_esp_backend (im, idx);
/* Run after DPDK EAL init, at main-loop entry. */
1088 VLIB_MAIN_LOOP_ENTER_FUNCTION (dpdk_ipsec_main_init);
1091 * fd.io coding-style-patch-verification: ON
1094 * eval: (c-set-style "gnu")