2 * Copyright (c) 2017 Intel and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
15 #include <vnet/vnet.h>
16 #include <vnet/ip/ip.h>
17 #include <vnet/api_errno.h>
18 #include <vnet/ipsec/ipsec.h>
19 #include <vlib/node_funcs.h>
21 #include <dpdk/device/dpdk.h>
22 #include <dpdk/ipsec/ipsec.h>
/* Global state for the DPDK crypto backend (algos, devices, per-numa data). */
24 dpdk_crypto_main_t dpdk_crypto_main;
/* Zero-initializer used with vec_validate_init_empty for struct elements. */
26 #define EMPTY_STRUCT {0}
/* Default crypto-op mempool size, used when startup config sets none. */
27 #define NUM_CRYPTO_MBUFS 16384
/*
 * Populate dcm->cipher_algs and dcm->auth_algs with the DPDK xform
 * type/algorithm for each VPP IPsec crypto/integ algorithm.  Every entry
 * starts with disabled = n_mains; crypto_auto_placement() later decrements
 * this per worker that gets a resource, so a non-zero value means "not
 * usable on all threads".
 */
30 algos_init (u32 n_mains)
32 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
35 vec_validate_aligned (dcm->cipher_algs, IPSEC_CRYPTO_N_ALG - 1, 8);
/* Macro body expanded once per IPSEC crypto alg: set name, mark disabled. */
39 dcm->cipher_algs[IPSEC_CRYPTO_ALG_##f].name = str; \
40 dcm->cipher_algs[IPSEC_CRYPTO_ALG_##f].disabled = n_mains;
41 foreach_ipsec_crypto_alg
45 /* Minimum boundary for ciphers is 4B, required by ESP */
46 a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_NONE];
47 a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
48 a->alg = RTE_CRYPTO_CIPHER_NULL;
49 a->boundary = 4; /* 1 */
53 a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CBC_128];
54 a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
55 a->alg = RTE_CRYPTO_CIPHER_AES_CBC;
60 a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CBC_192];
61 a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
62 a->alg = RTE_CRYPTO_CIPHER_AES_CBC;
67 a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CBC_256];
68 a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
69 a->alg = RTE_CRYPTO_CIPHER_AES_CBC;
/* AES-CTR variants: stream mode, so boundary stays at the 4B ESP minimum. */
74 a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CTR_128];
75 a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
76 a->alg = RTE_CRYPTO_CIPHER_AES_CTR;
77 a->boundary = 4; /* 1 */
81 a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CTR_192];
82 a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
83 a->alg = RTE_CRYPTO_CIPHER_AES_CTR;
84 a->boundary = 4; /* 1 */
88 a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CTR_256];
89 a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
90 a->alg = RTE_CRYPTO_AES_CTR_ALG_PLACEHOLDER_SEE_NOTE;
91 a->boundary = 4; /* 1 */
/* AES-GCM is an AEAD in the DPDK API (single combined xform). */
95 #define AES_GCM_TYPE RTE_CRYPTO_SYM_XFORM_AEAD
96 #define AES_GCM_ALG RTE_CRYPTO_AEAD_AES_GCM
98 a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_GCM_128];
99 a->type = AES_GCM_TYPE;
100 a->alg = AES_GCM_ALG;
101 a->boundary = 4; /* 1 */
106 a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_GCM_192];
107 a->type = AES_GCM_TYPE;
108 a->alg = AES_GCM_ALG;
109 a->boundary = 4; /* 1 */
114 a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_GCM_256];
115 a->type = AES_GCM_TYPE;
116 a->alg = AES_GCM_ALG;
117 a->boundary = 4; /* 1 */
/* Same pattern for integrity (auth) algorithms. */
122 vec_validate (dcm->auth_algs, IPSEC_INTEG_N_ALG - 1);
126 dcm->auth_algs[IPSEC_INTEG_ALG_##f].name = str; \
127 dcm->auth_algs[IPSEC_INTEG_ALG_##f].disabled = n_mains;
128 foreach_ipsec_integ_alg
132 a = &dcm->auth_algs[IPSEC_INTEG_ALG_NONE];
133 a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
134 a->alg = RTE_CRYPTO_AUTH_NULL;
138 a = &dcm->auth_algs[IPSEC_INTEG_ALG_MD5_96];
139 a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
140 a->alg = RTE_CRYPTO_AUTH_MD5_HMAC;
144 a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA1_96];
145 a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
146 a->alg = RTE_CRYPTO_AUTH_SHA1_HMAC;
150 a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA_256_96];
151 a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
152 a->alg = RTE_CRYPTO_AUTH_SHA256_HMAC;
156 a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA_256_128];
157 a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
158 a->alg = RTE_CRYPTO_AUTH_SHA256_HMAC;
162 a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA_384_192];
163 a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
164 a->alg = RTE_CRYPTO_AUTH_SHA384_HMAC;
168 a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA_512_256];
169 a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
170 a->alg = RTE_CRYPTO_AUTH_SHA512_HMAC;
/* Return the index of @alg within dcm->cipher_algs (pointer arithmetic). */
176 cipher_alg_index (const crypto_alg_t * alg)
178 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
180 return (alg - dcm->cipher_algs);
/* Return the index of @alg within dcm->auth_algs (pointer arithmetic). */
184 auth_alg_index (const crypto_alg_t * alg)
186 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
188 return (alg - dcm->auth_algs);
/*
 * Map a DPDK device capability entry to our cipher_algs table entry,
 * matching on xform type (CIPHER or AEAD), DPDK algo id, and key length.
 * Only symmetric-op capabilities are considered.
 */
191 static crypto_alg_t *
192 cipher_cap_to_alg (const struct rte_cryptodev_capabilities *cap, u8 key_len)
194 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
197 if (cap->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
201 vec_foreach (alg, dcm->cipher_algs)
/* Plain cipher match: same xform type, algo and key length. */
203 if ((cap->sym.xform_type == RTE_CRYPTO_SYM_XFORM_CIPHER) &&
204 (alg->type == RTE_CRYPTO_SYM_XFORM_CIPHER) &&
205 (cap->sym.cipher.algo == alg->alg) &&
206 (alg->key_len == key_len))
/* AEAD match (e.g. AES-GCM entries share the cipher_algs table). */
208 if ((cap->sym.xform_type == RTE_CRYPTO_SYM_XFORM_AEAD) &&
209 (alg->type == RTE_CRYPTO_SYM_XFORM_AEAD) &&
210 (cap->sym.aead.algo == alg->alg) &&
211 (alg->key_len == key_len))
/*
 * Map a DPDK AUTH capability to our auth_algs table entry, matching on
 * DPDK algo id and digest truncation size.  Non-symmetric or non-AUTH
 * capabilities are rejected up front.
 */
219 static crypto_alg_t *
220 auth_cap_to_alg (const struct rte_cryptodev_capabilities *cap, u8 trunc_size)
222 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
225 if ((cap->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC) ||
226 (cap->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH))
230 vec_foreach (alg, dcm->auth_algs)
232 if ((cap->sym.auth.algo == alg->alg) &&
233 (alg->trunc_size == trunc_size))
/*
 * Fill an AEAD (AES-GCM) symmetric xform from the SA.  The IV lives in the
 * per-op private area (dpdk_op_priv_t.cb) at a fixed offset from the op;
 * AAD is 12 bytes when extended sequence numbers are in use, 8 otherwise.
 * Direction (encrypt/decrypt) follows is_outbound.
 */
242 crypto_set_aead_xform (struct rte_crypto_sym_xform *xform,
243 ipsec_sa_t * sa, u8 is_outbound)
245 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
248 c = vec_elt_at_index (dcm->cipher_algs, sa->crypto_alg);
250 ASSERT (c->type == RTE_CRYPTO_SYM_XFORM_AEAD);
252 xform->type = RTE_CRYPTO_SYM_XFORM_AEAD;
253 xform->aead.algo = c->alg;
254 xform->aead.key.data = sa->crypto_key;
255 xform->aead.key.length = c->key_len;
256 xform->aead.iv.offset =
257 crypto_op_get_priv_offset () + offsetof (dpdk_op_priv_t, cb);
258 xform->aead.iv.length = 12;
259 xform->aead.digest_length = c->trunc_size;
/* ESN adds 4 bytes of sequence-number-high to the AAD: 12 vs 8. */
260 xform->aead.aad_length = sa->use_esn ? 12 : 8;
264 xform->aead.op = RTE_CRYPTO_AEAD_OP_ENCRYPT;
266 xform->aead.op = RTE_CRYPTO_AEAD_OP_DECRYPT;
/*
 * Fill a plain CIPHER symmetric xform from the SA.  IV offset points into
 * the per-op private area (dpdk_op_priv_t.cb); IV length comes from the
 * algorithm table.  Direction follows is_outbound.
 */
270 crypto_set_cipher_xform (struct rte_crypto_sym_xform *xform,
271 ipsec_sa_t * sa, u8 is_outbound)
273 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
276 c = vec_elt_at_index (dcm->cipher_algs, sa->crypto_alg);
278 ASSERT (c->type == RTE_CRYPTO_SYM_XFORM_CIPHER);
280 xform->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
281 xform->cipher.algo = c->alg;
282 xform->cipher.key.data = sa->crypto_key;
283 xform->cipher.key.length = c->key_len;
284 xform->cipher.iv.offset =
285 crypto_op_get_priv_offset () + offsetof (dpdk_op_priv_t, cb);
286 xform->cipher.iv.length = c->iv_len;
290 xform->cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
292 xform->cipher.op = RTE_CRYPTO_CIPHER_OP_DECRYPT;
/*
 * Fill an AUTH (HMAC) symmetric xform from the SA's integrity algorithm.
 * Outbound SAs generate the digest; inbound SAs verify it.
 */
296 crypto_set_auth_xform (struct rte_crypto_sym_xform *xform,
297 ipsec_sa_t * sa, u8 is_outbound)
299 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
302 a = vec_elt_at_index (dcm->auth_algs, sa->integ_alg);
304 ASSERT (a->type == RTE_CRYPTO_SYM_XFORM_AUTH);
306 xform->type = RTE_CRYPTO_SYM_XFORM_AUTH;
307 xform->auth.algo = a->alg;
308 xform->auth.key.data = sa->integ_key;
309 xform->auth.key.length = a->key_len;
310 xform->auth.digest_length = a->trunc_size;
314 xform->auth.op = RTE_CRYPTO_AUTH_OP_GENERATE;
316 xform->auth.op = RTE_CRYPTO_AUTH_OP_VERIFY;
/*
 * Create (or reuse) the DPDK symmetric session for an SA on the resource's
 * NUMA node, then initialize it for the resource's driver.  AES-GCM SAs get
 * a single AEAD xform; everything else gets a cipher+auth xform chain whose
 * order depends on direction.  The per-numa session_by_sa_index hash gives
 * one session header per SA, shared by all workers; access is serialized by
 * the per-numa spinlock.
 * NOTE(review): local "erorr" is a typo for "error" (used consistently, so
 * it compiles) — worth renaming in a follow-up change.
 */
320 create_sym_session (struct rte_cryptodev_sym_session **session,
322 crypto_resource_t * res,
323 crypto_worker_main_t * cwm, u8 is_outbound)
325 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
326 ipsec_main_t *im = &ipsec_main;
329 struct rte_crypto_sym_xform cipher_xform = { 0 };
330 struct rte_crypto_sym_xform auth_xform = { 0 };
331 struct rte_crypto_sym_xform *xfs;
332 struct rte_cryptodev_sym_session **s;
333 clib_error_t *erorr = 0;
336 sa = pool_elt_at_index (im->sad, sa_idx);
/* AES-GCM is AEAD: one combined xform, no separate auth. */
338 if ((sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_128) |
339 (sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_192) |
340 (sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_256))
342 crypto_set_aead_xform (&cipher_xform, sa, is_outbound)
347 crypto_set_cipher_xform (&cipher_xform, sa, is_outbound);
348 crypto_set_auth_xform (&auth_xform, sa, is_outbound);
/* Outbound: encrypt then auth; inbound: auth then decrypt. */
352 cipher_xform.next = &auth_xform;
357 auth_xform.next = &cipher_xform;
362 data = vec_elt_at_index (dcm->data, res->numa);
363 clib_spinlock_lock_if_init (&data->lockp);
367 * Multiple worker/threads share the session for an SA
368 * Single session per SA, initialized for each device driver
370 s = (void *) hash_get (data->session_by_sa_index, sa_idx);
374 session[0] = rte_cryptodev_sym_session_create (data->session_h);
377 data->session_h_failed += 1;
378 erorr = clib_error_return (0, "failed to create session header");
381 hash_set (data->session_by_sa_index, sa_idx, session[0]);
386 struct rte_mempool **mp;
387 mp = vec_elt_at_index (data->session_drv, res->drv_id);
388 ASSERT (mp[0] != NULL);
391 rte_cryptodev_sym_session_init (res->dev_id, session[0], xfs, mp[0]);
394 data->session_drv_failed[res->drv_id] += 1;
395 erorr = clib_error_return (0, "failed to init session for drv %u",
400 add_session_by_drv_and_sa_idx (session[0], data, res->drv_id, sa_idx);
403 clib_spinlock_unlock_if_init (&data->lockp);
/* Zero a mempool object and return it to its owning mempool. */
407 static void __attribute__ ((unused)) clear_and_free_obj (void *obj)
409 struct rte_mempool *mp = rte_mempool_from_obj (obj);
411 clib_memset (obj, 0, mp->elt_size);
413 rte_mempool_put (mp, obj);
416 /* This is from rte_cryptodev_pmd.h */
/* Local copy of the PMD accessor: fetch a driver's private session data. */
418 get_session_private_data (const struct rte_cryptodev_sym_session *sess,
421 return sess->sess_private_data[driver_id];
424 /* This is from rte_cryptodev_pmd.h */
/* Local copy of the PMD mutator: store a driver's private session data. */
426 set_session_private_data (struct rte_cryptodev_sym_session *sess,
427 uint8_t driver_id, void *private_data)
429 sess->sess_private_data[driver_id] = private_data;
/*
 * Reap expired entries from the deferred session-disposal vector @v.  The
 * vector is ordered by timestamp, so we stop at the first entry younger
 * than session_timeout.  For each expired session: free every driver's
 * private data (bypassing rte_cryptodev_sym_session_clear to avoid needing
 * a dev_id for the drv_id), then free the session header, then delete the
 * processed prefix from the vector.
 */
432 static clib_error_t *
433 dpdk_crypto_session_disposal (crypto_session_disposal_t * v, u64 ts)
435 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
436 crypto_session_disposal_t *s;
444 /* ordered vector by timestamp */
445 if (!(s->ts + dcm->session_timeout < ts))
448 vec_foreach_index (drv_id, dcm->drv)
450 drv_session = get_session_private_data (s->session, drv_id);
455 * Custom clear to avoid finding a dev_id for drv_id:
456 * ret = rte_cryptodev_sym_session_clear (dev_id, drv_session);
459 clear_and_free_obj (drv_session);
461 set_session_private_data (s->session, drv_id, NULL);
/* Only free the header if it still belongs to a mempool. */
464 if (rte_mempool_from_obj(s->session))
466 ret = rte_cryptodev_sym_session_free (s->session);
/* Drop the processed (expired) prefix; reset if everything was reaped. */
473 vec_delete (v, s - v, 0);
475 vec_reset_length (v);
/*
 * ipsec backend add/del callback for an SA.  On add: derive the GCM salt
 * from the last 4 key bytes, or use a random salt for non-GCM SAs.  On
 * delete: for every NUMA node, unhook the SA's session from the lookup
 * hashes (under the per-numa spinlock) and queue it on the deferred
 * disposal vector so in-flight crypto ops can drain before the session is
 * actually freed.
 */
480 static clib_error_t *
481 add_del_sa_session (u32 sa_index, u8 is_add)
483 ipsec_main_t *im = &ipsec_main;
484 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
486 struct rte_cryptodev_sym_session *s;
493 ipsec_sa_t *sa = pool_elt_at_index (im->sad, sa_index);
495 switch (sa->crypto_alg)
497 case IPSEC_CRYPTO_ALG_AES_GCM_128:
498 case IPSEC_CRYPTO_ALG_AES_GCM_192:
499 case IPSEC_CRYPTO_ALG_AES_GCM_256:
/* GCM salt is the trailing 32 bits of the configured key. */
500 clib_memcpy (&sa->salt, &sa->crypto_key[sa->crypto_key_len - 4], 4);
/* Non-GCM: random salt, seeded from the CPU timestamp counter. */
503 seed = (u32) clib_cpu_time_now ();
504 sa->salt = random_u32 (&seed);
511 vec_foreach (data, dcm->data)
513 clib_spinlock_lock_if_init (&data->lockp);
514 val = hash_get (data->session_by_sa_index, sa_index);
517 s = (struct rte_cryptodev_sym_session *) val[0];
/* Clear per-driver lookup entries for this SA. */
518 vec_foreach_index (drv_id, dcm->drv)
520 val = (uword*) get_session_by_drv_and_sa_idx (data, drv_id, sa_index);
522 add_session_by_drv_and_sa_idx(NULL, data, drv_id, sa_index);
525 hash_unset (data->session_by_sa_index, sa_index);
/* Reap already-expired sessions, then queue this one for disposal. */
527 u64 ts = unix_time_now_nsec ();
528 dpdk_crypto_session_disposal (data->session_disposal, ts);
530 crypto_session_disposal_t sd;
534 vec_add1 (data->session_disposal, sd);
536 clib_spinlock_unlock_if_init (&data->lockp);
/*
 * ipsec backend capability callback: reject SA configurations this backend
 * cannot handle.  integ-alg NONE is only valid with AEAD (AES-GCM) or NONE
 * crypto; any algorithm still marked disabled (no crypto resource assigned
 * on every thread) is also rejected.
 */
543 static clib_error_t *
544 dpdk_ipsec_check_support (ipsec_sa_t * sa)
546 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
548 if (sa->integ_alg == IPSEC_INTEG_ALG_NONE)
549 switch (sa->crypto_alg)
551 case IPSEC_CRYPTO_ALG_NONE:
552 case IPSEC_CRYPTO_ALG_AES_GCM_128:
553 case IPSEC_CRYPTO_ALG_AES_GCM_192:
554 case IPSEC_CRYPTO_ALG_AES_GCM_256:
557 return clib_error_return (0, "unsupported integ-alg %U crypto-alg %U",
558 format_ipsec_integ_alg, sa->integ_alg,
559 format_ipsec_crypto_alg, sa->crypto_alg);
562 /* XXX do we need the NONE check? */
563 if (sa->crypto_alg != IPSEC_CRYPTO_ALG_NONE &&
564 dcm->cipher_algs[sa->crypto_alg].disabled)
565 return clib_error_return (0, "disabled crypto-alg %U",
566 format_ipsec_crypto_alg, sa->crypto_alg);
568 /* XXX do we need the NONE check? */
569 if (sa->integ_alg != IPSEC_INTEG_ALG_NONE &&
570 dcm->auth_algs[sa->integ_alg].disabled)
571 return clib_error_return (0, "disabled integ-alg %U",
572 format_ipsec_integ_alg, sa->integ_alg);
/*
 * Walk a device's capability array (terminated by OP_TYPE_UNDEFINED) and
 * mark which of our cipher/auth algorithms it supports, iterating every
 * key size (ciphers/AEAD) or digest size (auth) the capability advertises.
 * Each match adds the device's free resources to the algorithm's resource
 * count; dcm->enabled is set once any algorithm has enough resources for
 * all n_mains threads.
 */
577 crypto_parse_capabilities (crypto_dev_t * dev,
578 const struct rte_cryptodev_capabilities *cap,
581 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
585 for (; cap->op != RTE_CRYPTO_OP_TYPE_UNDEFINED; cap++)
587 /* A single capability maps to multiple cipher/auth algorithms */
588 switch (cap->sym.xform_type)
590 case RTE_CRYPTO_SYM_XFORM_AEAD:
591 case RTE_CRYPTO_SYM_XFORM_CIPHER:
592 inc = cap->sym.cipher.key_size.increment;
594 for (len = cap->sym.cipher.key_size.min;
595 len <= cap->sym.cipher.key_size.max; len += inc)
597 alg = cipher_cap_to_alg (cap, len);
600 dev->cipher_support[cipher_alg_index (alg)] = 1;
601 alg->resources += vec_len (dev->free_resources);
602 /* At least enough resources to support one algo */
603 dcm->enabled |= (alg->resources >= n_mains);
606 case RTE_CRYPTO_SYM_XFORM_AUTH:
607 inc = cap->sym.auth.digest_size.increment;
609 for (len = cap->sym.auth.digest_size.min;
610 len <= cap->sym.auth.digest_size.max; len += inc)
612 alg = auth_cap_to_alg (cap, len);
615 dev->auth_support[auth_alg_index (alg)] = 1;
616 alg->resources += vec_len (dev->free_resources);
617 /* At least enough resources to support one algo */
618 dcm->enabled |= (alg->resources >= n_mains);
/* Descriptors per crypto queue pair. */
627 #define DPDK_CRYPTO_N_QUEUE_DESC 2048
/* Objects in each session (header and per-driver) mempool. */
628 #define DPDK_CRYPTO_NB_SESS_OBJS 20000
/*
 * Configure a crypto device on NUMA node @numa with @n_qp queue pairs,
 * set up each queue pair with DPDK_CRYPTO_N_QUEUE_DESC descriptors, and
 * start the device.  Returns a clib error on the first failing step.
 */
630 static clib_error_t *
631 crypto_dev_conf (u8 dev, u16 n_qp, u8 numa)
633 struct rte_cryptodev_config dev_conf;
634 struct rte_cryptodev_qp_conf qp_conf;
639 dev_conf.socket_id = numa;
640 dev_conf.nb_queue_pairs = n_qp;
642 error_str = "failed to configure crypto device %u";
643 ret = rte_cryptodev_configure (dev, &dev_conf);
645 return clib_error_return (0, error_str, dev);
647 error_str = "failed to setup crypto device %u queue pair %u";
648 qp_conf.nb_descriptors = DPDK_CRYPTO_N_QUEUE_DESC;
649 for (qp = 0; qp < n_qp; qp++)
651 ret = rte_cryptodev_queue_pair_setup (dev, qp, &qp_conf, numa, NULL);
653 return clib_error_return (0, error_str, dev, qp);
656 error_str = "failed to start crypto device %u";
657 if (rte_cryptodev_start (dev))
658 return clib_error_return (0, error_str, dev);
/*
 * Enumerate all DPDK crypto devices: record name/numa/features/queue count
 * per device, group devices by driver id, configure+start each device that
 * supports sym operation chaining, then carve its queue pairs into
 * resources (one resource = a queue-pair pair, hence max_qp / 2) and parse
 * its capabilities against our algorithm tables.
 */
664 crypto_scan_devs (u32 n_mains)
666 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
667 struct rte_cryptodev *cryptodev;
668 struct rte_cryptodev_info info;
670 crypto_resource_t *res;
673 u16 max_res_idx, res_idx, j;
676 vec_validate_init_empty (dcm->dev, rte_cryptodev_count () - 1,
677 (crypto_dev_t) EMPTY_STRUCT);
679 for (i = 0; i < rte_cryptodev_count (); i++)
681 dev = vec_elt_at_index (dcm->dev, i);
683 cryptodev = &rte_cryptodevs[i];
684 rte_cryptodev_info_get (i, &info);
687 dev->name = cryptodev->data->name;
688 dev->numa = rte_cryptodev_socket_id (i);
689 dev->features = info.feature_flags;
690 dev->max_qp = info.max_nb_queue_pairs;
691 drv_id = info.driver_id;
692 if (drv_id >= vec_len (dcm->drv))
693 vec_validate_init_empty (dcm->drv, drv_id,
694 (crypto_drv_t) EMPTY_STRUCT);
695 vec_elt_at_index (dcm->drv, drv_id)->name = info.driver_name;
696 dev->drv_id = drv_id;
697 vec_add1 (vec_elt_at_index (dcm->drv, drv_id)->devs, i);
/* Chaining (cipher+auth in one op) is required by this backend. */
699 if (!(info.feature_flags & RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING))
702 if ((error = crypto_dev_conf (i, dev->max_qp, dev->numa)))
704 clib_error_report (error);
/* Two queue pairs (encrypt + decrypt) per resource. */
708 max_res_idx = (dev->max_qp / 2) - 1;
710 vec_validate (dev->free_resources, max_res_idx);
712 res_idx = vec_len (dcm->resource);
713 vec_validate_init_empty_aligned (dcm->resource, res_idx + max_res_idx,
714 (crypto_resource_t) EMPTY_STRUCT,
715 CLIB_CACHE_LINE_BYTES);
/* Fill free_resources in reverse so vec_pop hands out low indices first. */
717 for (j = 0; j <= max_res_idx; j++, res_idx++)
719 vec_elt (dev->free_resources, max_res_idx - j) = res_idx;
720 res = &dcm->resource[res_idx];
722 res->drv_id = drv_id;
724 res->numa = dev->numa;
725 res->thread_idx = (u16) ~ 0;
728 crypto_parse_capabilities (dev, info.capabilities, n_mains);
/*
 * Assign free device resources to worker threads.  For each device and each
 * worker (skipping the main thread when workers exist, and skipping workers
 * already using the device), pop a free resource and point the worker's
 * per-algorithm resource index at it for every algorithm the device
 * supports; each assignment decrements that algorithm's disabled count.
 * If the worker needed none of the device's algorithms the resource is
 * returned to the free list.
 */
733 crypto_auto_placement (void)
735 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
736 crypto_resource_t *res;
737 crypto_worker_main_t *cwm;
739 u32 thread_idx, skip_master;
744 skip_master = vlib_num_workers () > 0;
747 vec_foreach (dev, dcm->dev)
749 vec_foreach_index (thread_idx, dcm->workers_main)
751 if (vec_len (dev->free_resources) == 0)
754 if (thread_idx < skip_master)
757 /* Check thread is not already using the device */
758 vec_foreach (idx, dev->used_resources)
759 if (dcm->resource[idx[0]].thread_idx == thread_idx)
762 cwm = vec_elt_at_index (dcm->workers_main, thread_idx);
765 res_idx = vec_pop (dev->free_resources);
767 /* Set device only for supported algos */
768 for (i = 0; i < IPSEC_CRYPTO_N_ALG; i++)
769 if (dev->cipher_support[i] &&
770 cwm->cipher_resource_idx[i] == (u16) ~0)
772 dcm->cipher_algs[i].disabled--;
773 cwm->cipher_resource_idx[i] = res_idx;
777 for (i = 0; i < IPSEC_INTEG_N_ALG; i++)
778 if (dev->auth_support[i] &&
779 cwm->auth_resource_idx[i] == (u16) ~0)
781 dcm->auth_algs[i].disabled--;
782 cwm->auth_resource_idx[i] = res_idx;
/* Resource was not needed by this worker: give it back. */
788 vec_add1 (dev->free_resources, res_idx);
792 vec_add1 (dev->used_resources, res_idx);
794 res = vec_elt_at_index (dcm->resource, res_idx);
796 ASSERT (res->thread_idx == (u16) ~0);
797 res->thread_idx = thread_idx;
799 /* Add device to vector of polling resources */
800 vec_add1 (cwm->resource_idx, res_idx);
/*
 * rte_mempool object constructor for crypto ops: pre-set the constant
 * fields (session-based symmetric op, NOT_PROCESSED status, IOVA, owning
 * mempool) so the datapath doesn't have to.
 */
807 crypto_op_init (struct rte_mempool *mempool,
808 void *_arg __attribute__ ((unused)),
809 void *_obj, unsigned i __attribute__ ((unused)))
811 struct rte_crypto_op *op = _obj;
813 op->sess_type = RTE_CRYPTO_OP_WITH_SESSION;
814 op->type = RTE_CRYPTO_OP_TYPE_SYMMETRIC;
815 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
816 op->phys_addr = rte_mempool_virt2iova (_obj);
817 op->mempool = mempool;
/*
 * Create the per-NUMA crypto-op mempool (idempotent: returns early if the
 * node already has one).  Pool size comes from startup config, defaulting
 * to NUM_CRYPTO_MBUFS; objects are pre-initialized by crypto_op_init and
 * the pool private area is typed for symmetric ops.
 */
820 static clib_error_t *
821 crypto_create_crypto_op_pool (vlib_main_t * vm, u8 numa)
823 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
824 dpdk_config_main_t *conf = &dpdk_config_main;
827 u32 pool_priv_size = sizeof (struct rte_crypto_op_pool_private);
828 struct rte_crypto_op_pool_private *priv;
829 struct rte_mempool *mp;
831 data = vec_elt_at_index (dcm->data, numa);
833 /* Already allocated */
837 pool_name = format (0, "crypto_pool_numa%u%c", numa, 0);
839 if (conf->num_crypto_mbufs == 0)
840 conf->num_crypto_mbufs = NUM_CRYPTO_MBUFS;
842 mp = rte_mempool_create ((char *) pool_name, conf->num_crypto_mbufs,
843 crypto_op_len (), 512, pool_priv_size, NULL, NULL,
844 crypto_op_init, NULL, numa, 0);
846 vec_free (pool_name);
849 return clib_error_return (0, "failed to create crypto op mempool");
851 /* Initialize mempool private data */
852 priv = rte_mempool_get_priv (mp);
853 priv->priv_size = pool_priv_size;
854 priv->type = RTE_CRYPTO_OP_TYPE_SYMMETRIC;
856 data->crypto_op = mp;
/*
 * Create the per-NUMA session-header mempool (idempotent).  Element size
 * is the driver-independent session header size reported by DPDK.
 */
861 static clib_error_t *
862 crypto_create_session_h_pool (vlib_main_t * vm, u8 numa)
864 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
867 struct rte_mempool *mp;
870 data = vec_elt_at_index (dcm->data, numa);
875 pool_name = format (0, "session_h_pool_numa%u%c", numa, 0);
878 elt_size = rte_cryptodev_sym_get_header_session_size ();
881 rte_mempool_create ((char *) pool_name, DPDK_CRYPTO_NB_SESS_OBJS,
882 elt_size, 512, 0, NULL, NULL, NULL, NULL, numa, 0);
884 vec_free (pool_name);
887 return clib_error_return (0, "failed to create crypto session mempool");
889 data->session_h = mp;
/*
 * Create the per-NUMA, per-driver private-session mempool for @dev
 * (idempotent per driver).  Element size is the driver's private session
 * size; also sizes the per-driver/per-SA lookup vector and initializes the
 * per-numa spinlock.
 */
894 static clib_error_t *
895 crypto_create_session_drv_pool (vlib_main_t * vm, crypto_dev_t * dev)
897 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
900 struct rte_mempool *mp;
904 data = vec_elt_at_index (dcm->data, numa);
906 vec_validate (data->session_drv, dev->drv_id);
907 vec_validate (data->session_drv_failed, dev->drv_id);
908 vec_validate_aligned (data->session_by_drv_id_and_sa_index, 32,
909 CLIB_CACHE_LINE_BYTES);
911 if (data->session_drv[dev->drv_id])
914 pool_name = format (0, "session_drv%u_pool_numa%u%c", dev->drv_id, numa, 0);
916 elt_size = rte_cryptodev_sym_get_private_session_size (dev->id);
918 rte_mempool_create ((char *) pool_name, DPDK_CRYPTO_NB_SESS_OBJS,
919 elt_size, 512, 0, NULL, NULL, NULL, NULL, numa, 0);
921 vec_free (pool_name);
924 return clib_error_return (0, "failed to create session drv mempool");
926 data->session_drv[dev->drv_id] = mp;
927 clib_spinlock_init (&data->lockp);
/*
 * For every scanned crypto device, ensure the per-NUMA data slot exists
 * and create the crypto-op, session-header and per-driver session pools.
 * Stops and returns on the first error.
 */
932 static clib_error_t *
933 crypto_create_pools (vlib_main_t * vm)
935 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
936 clib_error_t *error = NULL;
940 vec_foreach (dev, dcm->dev)
942 vec_validate_aligned (dcm->data, dev->numa, CLIB_CACHE_LINE_BYTES);
944 error = crypto_create_crypto_op_pool (vm, dev->numa);
948 error = crypto_create_session_h_pool (vm, dev->numa);
952 error = crypto_create_session_drv_pool (vm, dev);
/*
 * Tear down the backend when there aren't enough crypto resources:
 * free every per-NUMA mempool, spinlock and vector, then the global
 * per-numa/worker/resource/algorithm vectors.
 */
962 crypto_disable (void)
964 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
971 vec_foreach (data, dcm->data)
973 rte_mempool_free (data->crypto_op);
974 rte_mempool_free (data->session_h);
976 vec_foreach_index (i, data->session_drv)
977 rte_mempool_free (data->session_drv[i]);
979 vec_free (data->session_drv);
980 clib_spinlock_free (&data->lockp);
984 vec_free (dcm->data);
985 vec_free (dcm->workers_main);
987 vec_free (dcm->resource);
988 vec_free (dcm->cipher_algs);
989 vec_free (dcm->auth_algs);
/*
 * One-shot process node that brings up the DPDK IPsec backend: init the
 * algorithm tables, scan/configure crypto devices, and if enough resources
 * exist (dcm->enabled) set up per-worker state, place resources on
 * workers, create mempools, register+select the "dpdk backend" ESP
 * backend, and switch the dpdk-crypto-input node to polling on every
 * worker.  Falls back (with a warning) to the OpenSSL backend when
 * resources are insufficient.
 */
993 dpdk_ipsec_process (vlib_main_t * vm, vlib_node_runtime_t * rt,
996 ipsec_main_t *im = &ipsec_main;
997 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
998 vlib_thread_main_t *tm = vlib_get_thread_main ();
999 crypto_worker_main_t *cwm;
1000 clib_error_t *error = NULL;
1001 u32 i, skip_master, n_mains;
1003 n_mains = tm->n_vlib_mains;
1004 skip_master = vlib_num_workers () > 0;
1006 algos_init (n_mains - skip_master);
1008 crypto_scan_devs (n_mains - skip_master);
1010 if (!(dcm->enabled))
1012 clib_warning ("not enough DPDK crypto resources, default to OpenSSL");
/* Sessions linger 10 seconds (in ns) on the disposal list before free. */
1017 dcm->session_timeout = 10e9;
1019 vec_validate_init_empty_aligned (dcm->workers_main, n_mains - 1,
1020 (crypto_worker_main_t) EMPTY_STRUCT,
1021 CLIB_CACHE_LINE_BYTES);
1024 vec_foreach (cwm, dcm->workers_main)
1026 vec_validate_init_empty_aligned (cwm->ops, VLIB_FRAME_SIZE - 1, 0,
1027 CLIB_CACHE_LINE_BYTES);
/* ~0 marks "no resource assigned yet" for every algorithm. */
1028 clib_memset (cwm->cipher_resource_idx, ~0,
1029 IPSEC_CRYPTO_N_ALG * sizeof(*cwm->cipher_resource_idx));
1030 clib_memset (cwm->auth_resource_idx, ~0,
1031 IPSEC_INTEG_N_ALG * sizeof(*cwm->auth_resource_idx));
1035 crypto_auto_placement ();
1037 error = crypto_create_pools (vm);
1040 clib_error_report (error);
1046 u32 idx = ipsec_register_esp_backend (vm, im, "dpdk backend",
1047 "dpdk-esp4-encrypt",
1048 "dpdk-esp4-decrypt",
1049 "dpdk-esp6-encrypt",
1050 "dpdk-esp6-decrypt",
1051 dpdk_ipsec_check_support,
1052 add_del_sa_session);
1053 int rv = ipsec_select_esp_backend (im, idx);
1056 vlib_node_t *node = vlib_get_node_by_name (vm, (u8 *) "dpdk-crypto-input");
1058 for (i = skip_master; i < n_mains; i++)
1059 vlib_node_set_state (vlib_mains[i], node->index, VLIB_NODE_STATE_POLLING);
/* Register the setup routine above as a VLIB process node (128KB stack). */
1064 VLIB_REGISTER_NODE (dpdk_ipsec_process_node,static) = {
1065 .function = dpdk_ipsec_process,
1066 .type = VLIB_NODE_TYPE_PROCESS,
1067 .name = "dpdk-ipsec-process",
1068 .process_log2_n_stack_bytes = 17,
1073 * fd.io coding-style-patch-verification: ON
1076 * eval: (c-set-style "gnu")