/*
 * Copyright (c) 2017 Intel and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <vnet/vnet.h>
#include <vnet/ip/ip.h>
#include <vnet/api_errno.h>
#include <vnet/ipsec/ipsec.h>
#include <vlib/node_funcs.h>

#include <dpdk/device/dpdk.h>
#include <dpdk/ipsec/ipsec.h>
dpdk_crypto_main_t dpdk_crypto_main;

#define EMPTY_STRUCT {0}
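/*
 * Build the cipher and auth algorithm tables. Every algorithm starts with
 * disabled == n_mains (one count per main/worker thread); the counters are
 * decremented later by crypto_auto_placement () as device resources that
 * support the algorithm are handed out to threads.
 */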
algos_init (u32 n_mains)

  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;

  vec_validate_aligned (dcm->cipher_algs, IPSEC_CRYPTO_N_ALG - 1, 8);

#define _(v, f, str) \
  dcm->cipher_algs[IPSEC_CRYPTO_ALG_##f].name = str; \
  dcm->cipher_algs[IPSEC_CRYPTO_ALG_##f].disabled = n_mains;
  foreach_ipsec_crypto_alg
#undef _

  /* Minimum boundary for ciphers is 4B, required by ESP */
  a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_NONE];
  a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
  a->alg = RTE_CRYPTO_CIPHER_NULL;
  a->boundary = 4;	/* 1 */

  a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CBC_128];
  a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
  a->alg = RTE_CRYPTO_CIPHER_AES_CBC;

  a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CBC_192];
  a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
  a->alg = RTE_CRYPTO_CIPHER_AES_CBC;

  a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CBC_256];
  a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
  a->alg = RTE_CRYPTO_CIPHER_AES_CBC;

  a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CTR_128];
  a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
  a->alg = RTE_CRYPTO_CIPHER_AES_CTR;
  a->boundary = 4;	/* 1 */

  a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CTR_192];
  a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
  a->alg = RTE_CRYPTO_CIPHER_AES_CTR;
  a->boundary = 4;	/* 1 */

  a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CTR_256];
  a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
  a->alg = RTE_CRYPTO_CIPHER_AES_CTR;
  a->boundary = 4;	/* 1 */

#if DPDK_NO_AEAD
#define AES_GCM_TYPE RTE_CRYPTO_SYM_XFORM_CIPHER
#define AES_GCM_ALG RTE_CRYPTO_CIPHER_AES_GCM
#else
#define AES_GCM_TYPE RTE_CRYPTO_SYM_XFORM_AEAD
#define AES_GCM_ALG RTE_CRYPTO_AEAD_AES_GCM
#endif

  a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_GCM_128];
  a->type = AES_GCM_TYPE;
  a->alg = AES_GCM_ALG;
  a->boundary = 4;	/* 1 */

  a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_GCM_192];
  a->type = AES_GCM_TYPE;
  a->alg = AES_GCM_ALG;
  a->boundary = 4;	/* 1 */

  a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_GCM_256];
  a->type = AES_GCM_TYPE;
  a->alg = AES_GCM_ALG;
  a->boundary = 4;	/* 1 */

  vec_validate (dcm->auth_algs, IPSEC_INTEG_N_ALG - 1);

#define _(v, f, str) \
  dcm->auth_algs[IPSEC_INTEG_ALG_##f].name = str; \
  dcm->auth_algs[IPSEC_INTEG_ALG_##f].disabled = n_mains;
  foreach_ipsec_integ_alg
#undef _

  a = &dcm->auth_algs[IPSEC_INTEG_ALG_NONE];
  a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
  a->alg = RTE_CRYPTO_AUTH_NULL;

  a = &dcm->auth_algs[IPSEC_INTEG_ALG_MD5_96];
  a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
  a->alg = RTE_CRYPTO_AUTH_MD5_HMAC;

  a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA1_96];
  a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
  a->alg = RTE_CRYPTO_AUTH_SHA1_HMAC;

  a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA_256_96];
  a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
  a->alg = RTE_CRYPTO_AUTH_SHA256_HMAC;

  a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA_256_128];
  a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
  a->alg = RTE_CRYPTO_AUTH_SHA256_HMAC;

  a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA_384_192];
  a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
  a->alg = RTE_CRYPTO_AUTH_SHA384_HMAC;

  a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA_512_256];
  a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
  a->alg = RTE_CRYPTO_AUTH_SHA512_HMAC;
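/*
 * Map an algorithm table entry back to its index in the corresponding
 * dcm->cipher_algs / dcm->auth_algs vector (plain pointer arithmetic).
 */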
cipher_alg_index (const crypto_alg_t * alg)

  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;

  return (alg - dcm->cipher_algs);


auth_alg_index (const crypto_alg_t * alg)

  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;

  return (alg - dcm->auth_algs);
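/*
 * cipher_cap_to_alg / auth_cap_to_alg: map a DPDK device capability entry
 * (plus the requested key length or digest/truncation size) to the matching
 * entry in dcm->cipher_algs / dcm->auth_algs. The cipher lookup handles both
 * plain CIPHER and AEAD transform types so AES-GCM is covered on newer DPDK.
 */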
static crypto_alg_t *
cipher_cap_to_alg (const struct rte_cryptodev_capabilities *cap, u8 key_len)

  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;

  if (cap->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)

  vec_foreach (alg, dcm->cipher_algs)

    if ((cap->sym.xform_type == RTE_CRYPTO_SYM_XFORM_CIPHER) &&
        (alg->type == RTE_CRYPTO_SYM_XFORM_CIPHER) &&
        (cap->sym.cipher.algo == alg->alg) &&
        (alg->key_len == key_len))

    if ((cap->sym.xform_type == RTE_CRYPTO_SYM_XFORM_AEAD) &&
        (alg->type == RTE_CRYPTO_SYM_XFORM_AEAD) &&
        (cap->sym.aead.algo == alg->alg) &&
        (alg->key_len == key_len))

static crypto_alg_t *
auth_cap_to_alg (const struct rte_cryptodev_capabilities *cap, u8 trunc_size)

  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;

  if ((cap->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC) ||
      (cap->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH))

  vec_foreach (alg, dcm->auth_algs)

    if ((cap->sym.auth.algo == alg->alg) &&
        (alg->trunc_size == trunc_size))
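/*
 * Fill a rte_crypto_sym_xform for AEAD (AES-GCM) SAs: key straight from the
 * SA, 12-byte IV placed in the crypto op private area (dpdk_op_priv_t.cb),
 * digest length from the algorithm table, and 8 or 12 bytes of AAD depending
 * on whether extended sequence numbers are in use. The op direction follows
 * is_outbound (encrypt) vs inbound (decrypt).
 */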
crypto_set_aead_xform (struct rte_crypto_sym_xform *xform,
                       ipsec_sa_t * sa, u8 is_outbound)

  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;

  c = vec_elt_at_index (dcm->cipher_algs, sa->crypto_alg);

  ASSERT (c->type == RTE_CRYPTO_SYM_XFORM_AEAD);

  xform->type = RTE_CRYPTO_SYM_XFORM_AEAD;
  xform->aead.algo = c->alg;
  xform->aead.key.data = sa->crypto_key;
  xform->aead.key.length = c->key_len;
  xform->aead.iv.offset =
    crypto_op_get_priv_offset () + offsetof (dpdk_op_priv_t, cb);
  xform->aead.iv.length = 12;
  xform->aead.digest_length = c->trunc_size;
  xform->aead.aad_length = sa->use_esn ? 12 : 8;

    xform->aead.op = RTE_CRYPTO_AEAD_OP_ENCRYPT;

    xform->aead.op = RTE_CRYPTO_AEAD_OP_DECRYPT;
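/*
 * Fill a rte_crypto_sym_xform for plain ciphers (AES-CBC/CTR): key and IV
 * length come from the SA and the algorithm table; the IV itself lives at a
 * fixed offset inside the crypto op private data. Direction follows
 * is_outbound.
 */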
crypto_set_cipher_xform (struct rte_crypto_sym_xform *xform,
                         ipsec_sa_t * sa, u8 is_outbound)

  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;

  c = vec_elt_at_index (dcm->cipher_algs, sa->crypto_alg);

  ASSERT (c->type == RTE_CRYPTO_SYM_XFORM_CIPHER);

  xform->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
  xform->cipher.algo = c->alg;
  xform->cipher.key.data = sa->crypto_key;
  xform->cipher.key.length = c->key_len;

  xform->cipher.iv.offset =
    crypto_op_get_priv_offset () + offsetof (dpdk_op_priv_t, cb);
  xform->cipher.iv.length = c->iv_len;

    xform->cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;

    xform->cipher.op = RTE_CRYPTO_CIPHER_OP_DECRYPT;
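/*
 * Fill the authentication xform from the SA's integrity algorithm. The
 * AES-GCM special case (combined mode on DPDK releases without the AEAD API,
 * see DPDK_NO_AEAD elsewhere in this file) switches the auth algo to
 * RTE_CRYPTO_AUTH_AES_GCM and sets 8 or 12 bytes of additional auth data
 * depending on ESN; the auth IV fields only exist on newer releases.
 * Direction follows is_outbound (generate vs verify).
 */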
crypto_set_auth_xform (struct rte_crypto_sym_xform *xform,
                       ipsec_sa_t * sa, u8 is_outbound)

  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;

  a = vec_elt_at_index (dcm->auth_algs, sa->integ_alg);

  ASSERT (a->type == RTE_CRYPTO_SYM_XFORM_AUTH);

  xform->type = RTE_CRYPTO_SYM_XFORM_AUTH;
  xform->auth.algo = a->alg;
  xform->auth.key.data = sa->integ_key;
  xform->auth.key.length = a->key_len;
  xform->auth.digest_length = a->trunc_size;

  if (sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_128 ||
      sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_192 ||
      sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_256)
    xform->auth.algo = RTE_CRYPTO_AUTH_AES_GCM;
  xform->auth.add_auth_data_length = sa->use_esn ? 12 : 8;

  xform->auth.iv.offset =
    sizeof (struct rte_crypto_op) + sizeof (struct rte_crypto_sym_op) +
    offsetof (dpdk_op_priv_t, cb);
  xform->auth.iv.length = a->iv_len;

    xform->auth.op = RTE_CRYPTO_AUTH_OP_GENERATE;

    xform->auth.op = RTE_CRYPTO_AUTH_OP_VERIFY;
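/*
 * Create (or look up) the DPDK symmetric session for an SA on a given
 * resource. For AES-GCM a single AEAD xform is used; otherwise the cipher
 * and auth xforms are chained via their .next pointers according to
 * direction. On older DPDK (DPDK_NO_AEAD) each worker gets its own session
 * per device; on newer DPDK a single per-SA session header is created and
 * then initialized once per device driver from a per-driver mempool. The
 * result is cached in cwm->session_by_drv_id_and_sa_index keyed by
 * (drv_id, sa_idx).
 */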
create_sym_session (struct rte_cryptodev_sym_session **session,
                    crypto_resource_t * res,
                    crypto_worker_main_t * cwm, u8 is_outbound)

  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
  ipsec_main_t *im = &ipsec_main;

  struct rte_crypto_sym_xform cipher_xform = { 0 };
  struct rte_crypto_sym_xform auth_xform = { 0 };
  struct rte_crypto_sym_xform *xfs;
  crypto_session_key_t key = { 0 };

  key.drv_id = res->drv_id;

  sa = pool_elt_at_index (im->sad, sa_idx);

  if ((sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_128) |
      (sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_192) |
      (sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_256))

      crypto_set_aead_xform (&cipher_xform, sa, is_outbound);

#endif /* ! DPDK_NO_AEAD */

      crypto_set_cipher_xform (&cipher_xform, sa, is_outbound);
      crypto_set_auth_xform (&auth_xform, sa, is_outbound);

          cipher_xform.next = &auth_xform;

          auth_xform.next = &cipher_xform;

  data = vec_elt_at_index (dcm->data, res->numa);

  /*
   * Each worker/thread has its own session per device driver
   */
  session[0] = rte_cryptodev_sym_session_create (res->dev_id, xfs);

      data->session_drv_failed[res->drv_id] += 1;
      return clib_error_return (0, "failed to create session for dev %u",

  /*
   * Multiple worker/threads share the session for an SA
   * Single session per SA, initialized for each device driver
   */
  session[0] = (void *) hash_get (data->session_by_sa_index, sa_idx);

      session[0] = rte_cryptodev_sym_session_create (data->session_h);

          data->session_h_failed += 1;
          return clib_error_return (0, "failed to create session header");

      hash_set (data->session_by_sa_index, sa_idx, session[0]);

  struct rte_mempool **mp;
  mp = vec_elt_at_index (data->session_drv, res->drv_id);
  ASSERT (mp[0] != NULL);

  rte_cryptodev_sym_session_init (res->dev_id, session[0], xfs, mp[0]);

      data->session_drv_failed[res->drv_id] += 1;
      return clib_error_return (0, "failed to init session for drv %u",

#endif /* DPDK_NO_AEAD */

  hash_set (cwm->session_by_drv_id_and_sa_index, key.val, session[0]);
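/*
 * Helpers for tearing sessions down on newer DPDK: clear_and_free_obj zeroes
 * a mempool object and puts it back into its owning pool (used to release
 * per-driver session private data without having to find a dev_id for the
 * driver), and the two *_session_private_data helpers mirror the accessors
 * from rte_cryptodev_pmd.h.
 */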
static void __attribute__ ((unused)) clear_and_free_obj (void *obj)

  struct rte_mempool *mp = rte_mempool_from_obj (obj);

  memset (obj, 0, mp->elt_size);

  rte_mempool_put (mp, obj);


/* This is from rte_cryptodev_pmd.h */
get_session_private_data (const struct rte_cryptodev_sym_session *sess,

  return sess->sess_private_data[driver_id];


/* This is from rte_cryptodev_pmd.h */
set_session_private_data (struct rte_cryptodev_sym_session *sess,
                          uint8_t driver_id, void *private_data)

  sess->sess_private_data[driver_id] = private_data;
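/*
 * ipsec add/del SA session callback. On add, the SA's 4-byte salt is derived
 * from the tail of the crypto key for AES-GCM (or from a random value for
 * other algorithms). On delete, every cached per-worker session for the SA
 * is dropped, and the per-SA session together with its per-driver private
 * data is freed.
 */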
static clib_error_t *
add_del_sa_session (u32 sa_index, u8 is_add)

  ipsec_main_t *im = &ipsec_main;
  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
  crypto_worker_main_t *cwm;
  struct rte_cryptodev_sym_session *s;
  crypto_session_key_t key = { 0 };

  key.sa_idx = sa_index;

      ipsec_sa_t *sa = pool_elt_at_index (im->sad, sa_index);

      switch (sa->crypto_alg)
        case IPSEC_CRYPTO_ALG_AES_GCM_128:
        case IPSEC_CRYPTO_ALG_AES_GCM_192:
        case IPSEC_CRYPTO_ALG_AES_GCM_256:
          clib_memcpy (&sa->salt, &sa->crypto_key[sa->crypto_key_len - 4], 4);

          seed = (u32) clib_cpu_time_now ();
          sa->salt = random_u32 (&seed);

  /* XXX Wait N cycles to be sure session is not in use OR
   * keep refcnt at SA level per worker/thread ? */

  vec_foreach (cwm, dcm->workers_main)

      for (drv_id = 0; drv_id < dcm->max_drv_id; drv_id++)

          val = hash_get (cwm->session_by_drv_id_and_sa_index, key.val);
          s = (struct rte_cryptodev_sym_session *) val;

          ret = (rte_cryptodev_sym_session_free (s->dev_id, s) == NULL);

          hash_unset (cwm->session_by_drv_id_and_sa_index, key.val);

  vec_foreach (data, dcm->data)

      val = hash_get (data->session_by_sa_index, sa_index);
      s = (struct rte_cryptodev_sym_session *) val;

      hash_unset (data->session_by_sa_index, sa_index);

      vec_foreach_index (drv_id, dcm->drv)

          drv_session = get_session_private_data (s, drv_id);

          /*
           * Custom clear to avoid finding a dev_id for drv_id:
           * ret = rte_cryptodev_sym_session_clear (dev_id, drv_session);
           */
          clear_and_free_obj (drv_session);

          set_session_private_data (s, drv_id, NULL);

      ret = rte_cryptodev_sym_session_free (s);
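/*
 * check_support callback: ESP through this backend requires an integrity
 * algorithm unless the cipher is AES-GCM (which authenticates by itself),
 * and both the cipher and integrity algorithms must have been enabled,
 * i.e. assigned DPDK resources during placement.
 */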
static clib_error_t *
dpdk_ipsec_check_support (ipsec_sa_t * sa)

  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;

  if (sa->integ_alg == IPSEC_INTEG_ALG_NONE)
    switch (sa->crypto_alg)
      case IPSEC_CRYPTO_ALG_AES_GCM_128:
      case IPSEC_CRYPTO_ALG_AES_GCM_192:
      case IPSEC_CRYPTO_ALG_AES_GCM_256:

        return clib_error_return (0, "unsupported integ-alg %U crypto-alg %U",
                                  format_ipsec_integ_alg, sa->integ_alg,
                                  format_ipsec_crypto_alg, sa->crypto_alg);

  /* XXX do we need the NONE check? */
  if (sa->crypto_alg != IPSEC_CRYPTO_ALG_NONE &&
      dcm->cipher_algs[sa->crypto_alg].disabled)
    return clib_error_return (0, "disabled crypto-alg %U",
                              format_ipsec_crypto_alg, sa->crypto_alg);

  /* XXX do we need the NONE check? */
  if (sa->integ_alg != IPSEC_INTEG_ALG_NONE &&
      dcm->auth_algs[sa->integ_alg].disabled)
    return clib_error_return (0, "disabled integ-alg %U",
                              format_ipsec_integ_alg, sa->integ_alg);
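/*
 * Walk a device's capability array. For every key size a cipher capability
 * covers (and every digest size an auth capability covers) mark the matching
 * algorithm as supported by this device and credit the device's resources to
 * that algorithm; the backend is considered usable (dcm->enabled) once some
 * algorithm has at least one resource per main/worker thread.
 */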
crypto_parse_capabilities (crypto_dev_t * dev,
                           const struct rte_cryptodev_capabilities *cap,

  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;

  for (; cap->op != RTE_CRYPTO_OP_TYPE_UNDEFINED; cap++)

      /* A single capability maps to multiple cipher/auth algorithms */
      switch (cap->sym.xform_type)

        case RTE_CRYPTO_SYM_XFORM_AEAD:

        case RTE_CRYPTO_SYM_XFORM_CIPHER:
          inc = cap->sym.cipher.key_size.increment;

          for (len = cap->sym.cipher.key_size.min;
               len <= cap->sym.cipher.key_size.max; len += inc)

              alg = cipher_cap_to_alg (cap, len);

              dev->cipher_support[cipher_alg_index (alg)] = 1;
              alg->resources += vec_len (dev->free_resources);
              /* At least enough resources to support one algo */
              dcm->enabled |= (alg->resources >= n_mains);

        case RTE_CRYPTO_SYM_XFORM_AUTH:
          inc = cap->sym.auth.digest_size.increment;

          for (len = cap->sym.auth.digest_size.min;
               len <= cap->sym.auth.digest_size.max; len += inc)

              alg = auth_cap_to_alg (cap, len);

              dev->auth_support[auth_alg_index (alg)] = 1;
              alg->resources += vec_len (dev->free_resources);
              /* At least enough resources to support one algo */
              dcm->enabled |= (alg->resources >= n_mains);

#define DPDK_CRYPTO_N_QUEUE_DESC 2048
#define DPDK_CRYPTO_NB_SESS_OBJS 20000
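/*
 * Configure one cryptodev: n_qp queue pairs on the device's NUMA socket with
 * DPDK_CRYPTO_N_QUEUE_DESC descriptors each, plus (on DPDK releases that
 * still carry session_mp in rte_cryptodev_config) a per-device session
 * mempool of DPDK_CRYPTO_NB_SESS_OBJS objects.
 */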
static clib_error_t *
crypto_dev_conf (u8 dev, u16 n_qp, u8 numa)

  struct rte_cryptodev_config dev_conf;
  struct rte_cryptodev_qp_conf qp_conf;

  dev_conf.socket_id = numa;
  dev_conf.nb_queue_pairs = n_qp;

  dev_conf.session_mp.nb_objs = DPDK_CRYPTO_NB_SESS_OBJS;
  dev_conf.session_mp.cache_size = 512;

  error_str = "failed to configure crypto device %u";
  ret = rte_cryptodev_configure (dev, &dev_conf);

    return clib_error_return (0, error_str, dev);

  error_str = "failed to setup crypto device %u queue pair %u";
  qp_conf.nb_descriptors = DPDK_CRYPTO_N_QUEUE_DESC;
  for (qp = 0; qp < n_qp; qp++)

      ret = rte_cryptodev_queue_pair_setup (dev, qp, &qp_conf, numa);

      ret = rte_cryptodev_queue_pair_setup (dev, qp, &qp_conf, numa, NULL);

        return clib_error_return (0, error_str, dev, qp);
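/*
 * Enumerate all DPDK cryptodevs: record name, NUMA node, feature flags and
 * queue-pair count, group devices per driver, skip devices that cannot chain
 * symmetric operations, configure the rest, and slice their queue pairs into
 * crypto_resource_t entries (one resource per pair of queue pairs) that are
 * later handed out to worker threads. Finally parse each device's
 * capabilities to learn which algorithms it can back.
 */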
crypto_scan_devs (u32 n_mains)

  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
  struct rte_cryptodev *cryptodev;
  struct rte_cryptodev_info info;

  crypto_resource_t *res;

  u16 max_res_idx, res_idx, j;

  vec_validate_init_empty (dcm->dev, rte_cryptodev_count () - 1,
                           (crypto_dev_t) EMPTY_STRUCT);

  for (i = 0; i < rte_cryptodev_count (); i++)

      dev = vec_elt_at_index (dcm->dev, i);

      cryptodev = &rte_cryptodevs[i];
      rte_cryptodev_info_get (i, &info);

      dev->name = cryptodev->data->name;
      dev->numa = rte_cryptodev_socket_id (i);
      dev->features = info.feature_flags;
      dev->max_qp = info.max_nb_queue_pairs;

      drv_id = cryptodev->dev_type;

      drv_id = info.driver_id;

      if (drv_id >= vec_len (dcm->drv))
        vec_validate_init_empty (dcm->drv, drv_id,
                                 (crypto_drv_t) EMPTY_STRUCT);
      vec_elt_at_index (dcm->drv, drv_id)->name = info.driver_name;
      dev->drv_id = drv_id;
      vec_add1 (vec_elt_at_index (dcm->drv, drv_id)->devs, i);

      if (!(info.feature_flags & RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING))

      if ((error = crypto_dev_conf (i, dev->max_qp, dev->numa)))

          clib_error_report (error);

      max_res_idx = (dev->max_qp / 2) - 1;

      vec_validate (dev->free_resources, max_res_idx);

      res_idx = vec_len (dcm->resource);
      vec_validate_init_empty_aligned (dcm->resource, res_idx + max_res_idx,
                                       (crypto_resource_t) EMPTY_STRUCT,
                                       CLIB_CACHE_LINE_BYTES);

      for (j = 0; j <= max_res_idx; j++, res_idx++)

          vec_elt (dev->free_resources, max_res_idx - j) = res_idx;
          res = &dcm->resource[res_idx];

          res->drv_id = drv_id;

          res->numa = dev->numa;
          res->thread_idx = (u16) ~ 0;

      crypto_parse_capabilities (dev, info.capabilities, n_mains);
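/*
 * Distribute device resources across threads round-robin, skipping the main
 * thread when workers exist. A thread that takes a resource from a device
 * records it as the resource to use for every algorithm that device supports
 * and that the thread has no resource for yet; each such assignment
 * decrements the algorithm's disabled counter. Resources nobody needed go
 * back to the device's free list, the rest are marked used, bound to the
 * thread, and added to its vector of polling resources.
 */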
crypto_auto_placement (void)

  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
  crypto_resource_t *res;
  crypto_worker_main_t *cwm;

  u32 thread_idx, skip_master;

  skip_master = vlib_num_workers () > 0;

  vec_foreach (dev, dcm->dev)

      vec_foreach_index (thread_idx, dcm->workers_main)

          if (vec_len (dev->free_resources) == 0)

          if (thread_idx < skip_master)

          /* Check thread is not already using the device */
          vec_foreach (idx, dev->used_resources)
            if (dcm->resource[idx[0]].thread_idx == thread_idx)

          cwm = vec_elt_at_index (dcm->workers_main, thread_idx);

          res_idx = vec_pop (dev->free_resources);

          /* Set device only for supported algos */
          for (i = 0; i < IPSEC_CRYPTO_N_ALG; i++)
            if (dev->cipher_support[i] &&
                cwm->cipher_resource_idx[i] == (u16) ~0)

                dcm->cipher_algs[i].disabled--;
                cwm->cipher_resource_idx[i] = res_idx;

          for (i = 0; i < IPSEC_INTEG_N_ALG; i++)
            if (dev->auth_support[i] &&
                cwm->auth_resource_idx[i] == (u16) ~0)

                dcm->auth_algs[i].disabled--;
                cwm->auth_resource_idx[i] = res_idx;

            vec_add1 (dev->free_resources, res_idx);

          vec_add1 (dev->used_resources, res_idx);

          res = vec_elt_at_index (dcm->resource, res_idx);

          ASSERT (res->thread_idx == (u16) ~0);
          res->thread_idx = thread_idx;

          /* Add device to vector of polling resources */
          vec_add1 (cwm->resource_idx, res_idx);
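/*
 * Mempool object constructor for rte_crypto_op: point the sym op just past
 * the op header, mark the op as session-based symmetric crypto, reset its
 * status, and cache its physical address and owning mempool.
 */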
crypto_op_init (struct rte_mempool *mempool,
                void *_arg __attribute__ ((unused)),
                void *_obj, unsigned i __attribute__ ((unused)))

  struct rte_crypto_op *op = _obj;

  op->sym = (struct rte_crypto_sym_op *) (op + 1);
  op->sym->sess_type = RTE_CRYPTO_SYM_OP_WITH_SESSION;

  op->sess_type = RTE_CRYPTO_OP_WITH_SESSION;

  op->type = RTE_CRYPTO_OP_TYPE_SYMMETRIC;
  op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
  op->phys_addr = rte_mem_virt2phy (_obj);
  op->mempool = mempool;
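/*
 * Per-NUMA pool creation helpers. Each NUMA node that hosts a cryptodev gets
 * a crypto-op pool (crypto_create_crypto_op_pool) and, on DPDK releases with
 * the split session API, a session-header pool (crypto_create_session_h_pool)
 * plus one private-data pool per driver (crypto_create_session_drv_pool).
 * The pools are carved out of VPP physmem via dpdk_pool_create () and each
 * one is created only once per node.
 */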
static clib_error_t *
crypto_create_crypto_op_pool (vlib_main_t * vm, u8 numa)

  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
  dpdk_config_main_t *conf = &dpdk_config_main;

  u32 pool_priv_size = sizeof (struct rte_crypto_op_pool_private);
  struct rte_crypto_op_pool_private *priv;
  struct rte_mempool *mp;
  clib_error_t *error = NULL;
  vlib_physmem_region_index_t pri;

  data = vec_elt_at_index (dcm->data, numa);

  /* Already allocated */

  pool_name = format (0, "crypto_pool_numa%u%c", numa, 0);

  dpdk_pool_create (vm, pool_name, crypto_op_len (), conf->num_mbufs,
                    pool_priv_size, 512, numa, &mp, &pri);

  vec_free (pool_name);

  /* Initialize mempool private data */
  priv = rte_mempool_get_priv (mp);
  priv->priv_size = pool_priv_size;
  priv->type = RTE_CRYPTO_OP_TYPE_SYMMETRIC;

  /* call the object initializers */
  rte_mempool_obj_iter (mp, crypto_op_init, 0);

  data->crypto_op = mp;


static clib_error_t *
crypto_create_session_h_pool (vlib_main_t * vm, u8 numa)

  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;

  struct rte_mempool *mp;
  clib_error_t *error = NULL;
  vlib_physmem_region_index_t pri;

  data = vec_elt_at_index (dcm->data, numa);

  pool_name = format (0, "session_h_pool_numa%u%c", numa, 0);

  elt_size = rte_cryptodev_get_header_session_size ();

  dpdk_pool_create (vm, pool_name, elt_size, DPDK_CRYPTO_NB_SESS_OBJS,
                    0, 512, numa, &mp, &pri);

  vec_free (pool_name);

  data->session_h = mp;


static clib_error_t *
crypto_create_session_drv_pool (vlib_main_t * vm, crypto_dev_t * dev)

  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;

  struct rte_mempool *mp;
  clib_error_t *error = NULL;
  vlib_physmem_region_index_t pri;

  data = vec_elt_at_index (dcm->data, numa);

  vec_validate (data->session_drv, dev->drv_id);
  vec_validate (data->session_drv_failed, dev->drv_id);

  if (data->session_drv[dev->drv_id])

  pool_name = format (0, "session_drv%u_pool_numa%u%c", dev->drv_id, numa, 0);
  elt_size = rte_cryptodev_get_private_session_size (dev->id);

  dpdk_pool_create (vm, pool_name, elt_size, DPDK_CRYPTO_NB_SESS_OBJS,
                    0, 512, numa, &mp, &pri);

  vec_free (pool_name);

  data->session_drv[dev->drv_id] = mp;


static clib_error_t *
crypto_create_pools (vlib_main_t * vm)

  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
  clib_error_t *error = NULL;

  vec_foreach (dev, dcm->dev)

      vec_validate_aligned (dcm->data, dev->numa, CLIB_CACHE_LINE_BYTES);

      error = crypto_create_crypto_op_pool (vm, dev->numa);

      error = crypto_create_session_h_pool (vm, dev->numa);

      error = crypto_create_session_drv_pool (vm, dev);
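/*
 * Tear everything down when the DPDK backend cannot serve all threads: free
 * every per-NUMA mempool and all plugin vectors so the stock OpenSSL ESP
 * path is used instead.
 */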
crypto_disable (void)

  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
  crypto_data_t *data;

  vec_foreach (data, dcm->data)

      rte_mempool_free (data->crypto_op);
      rte_mempool_free (data->session_h);

      vec_foreach_index (i, data->session_drv)
        rte_mempool_free (data->session_drv[i]);

      vec_free (data->session_drv);

  vec_free (dcm->data);
  vec_free (dcm->workers_main);
  vec_free (dcm->sa_session);
  vec_free (dcm->dev);
  vec_free (dcm->resource);
  vec_free (dcm->cipher_algs);
  vec_free (dcm->auth_algs);
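/*
 * One-shot init process: build the algorithm tables, scan and configure the
 * cryptodevs, place resources on worker threads, and create the mempools.
 * If the backend came up, redirect ipsec-output-ip4 and ipsec-input-ip4 to
 * the dpdk-esp-encrypt / dpdk-esp-decrypt nodes, register the check-support
 * and SA add/del callbacks, and switch dpdk-crypto-input to polling on every
 * crypto-capable thread.
 */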
dpdk_ipsec_process (vlib_main_t * vm, vlib_node_runtime_t * rt,

  ipsec_main_t *im = &ipsec_main;
  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  crypto_worker_main_t *cwm;
  clib_error_t *error = NULL;
  u32 i, skip_master, n_mains;

  n_mains = tm->n_vlib_mains;
  skip_master = vlib_num_workers () > 0;

  algos_init (n_mains - skip_master);

  crypto_scan_devs (n_mains - skip_master);

  if (!(dcm->enabled))

      clib_warning ("not enough DPDK crypto resources, default to OpenSSL");

  vec_validate_init_empty_aligned (dcm->workers_main, n_mains - 1,
                                   (crypto_worker_main_t) EMPTY_STRUCT,
                                   CLIB_CACHE_LINE_BYTES);

  vec_foreach (cwm, dcm->workers_main)

      vec_validate_init_empty_aligned (cwm->ops, VLIB_FRAME_SIZE - 1, 0,
                                       CLIB_CACHE_LINE_BYTES);
      memset (cwm->cipher_resource_idx, ~0,
              IPSEC_CRYPTO_N_ALG * sizeof (*cwm->cipher_resource_idx));
      memset (cwm->auth_resource_idx, ~0,
              IPSEC_INTEG_N_ALG * sizeof (*cwm->auth_resource_idx));

  crypto_auto_placement ();

  error = crypto_create_pools (vm);

      clib_error_report (error);

  /* Add new next node and set it as default */
  vlib_node_t *node, *next_node;

  next_node = vlib_get_node_by_name (vm, (u8 *) "dpdk-esp-encrypt");

  node = vlib_get_node_by_name (vm, (u8 *) "ipsec-output-ip4");

  im->esp_encrypt_node_index = next_node->index;
  im->esp_encrypt_next_index =
    vlib_node_add_next (vm, node->index, next_node->index);

  next_node = vlib_get_node_by_name (vm, (u8 *) "dpdk-esp-decrypt");

  node = vlib_get_node_by_name (vm, (u8 *) "ipsec-input-ip4");

  im->esp_decrypt_node_index = next_node->index;
  im->esp_decrypt_next_index =
    vlib_node_add_next (vm, node->index, next_node->index);

  im->cb.check_support_cb = dpdk_ipsec_check_support;
  im->cb.add_del_sa_sess_cb = add_del_sa_session;

  node = vlib_get_node_by_name (vm, (u8 *) "dpdk-crypto-input");

  for (i = skip_master; i < n_mains; i++)
    vlib_node_set_state (vlib_mains[i], node->index, VLIB_NODE_STATE_POLLING);


VLIB_REGISTER_NODE (dpdk_ipsec_process_node, static) = {
    .function = dpdk_ipsec_process,
    .type = VLIB_NODE_TYPE_PROCESS,
    .name = "dpdk-ipsec-process",
    .process_log2_n_stack_bytes = 17,
};

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */