2 * Copyright (c) 2017 Intel and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
15 #include <vnet/vnet.h>
16 #include <vnet/ip/ip.h>
17 #include <vnet/api_errno.h>
18 #include <vnet/ipsec/ipsec.h>
19 #include <vlib/node_funcs.h>
21 #include <dpdk/device/dpdk.h>
22 #include <dpdk/ipsec/ipsec.h>
24 #define EMPTY_STRUCT {0}
/* Build the cipher/auth algorithm description tables.  Every algorithm
 * starts with disabled == n_mains, i.e. disabled on all threads; the
 * counter is decremented per thread as crypto resources supporting the
 * algorithm are assigned (see crypto_auto_placement()).
 * NOTE(review): this listing is incomplete — the function's return-type
 * line, braces and some #if/#else directives are not visible here. */
27 algos_init (u32 n_mains)
29 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
32 vec_validate_aligned (dcm->cipher_algs, IPSEC_CRYPTO_N_ALG - 1, 8);
/* Name every cipher alg and mark it disabled on all n_mains threads. */
36 dcm->cipher_algs[IPSEC_CRYPTO_ALG_##f].name = str; \
37 dcm->cipher_algs[IPSEC_CRYPTO_ALG_##f].disabled = n_mains;
38 foreach_ipsec_crypto_alg
42 /* Minimum boundary for ciphers is 4B, required by ESP */
43 a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_NONE];
44 a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
45 a->alg = RTE_CRYPTO_CIPHER_NULL;
46 a->boundary = 4; /* 1 */
50 a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CBC_128];
51 a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
52 a->alg = RTE_CRYPTO_CIPHER_AES_CBC;
57 a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CBC_192];
58 a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
59 a->alg = RTE_CRYPTO_CIPHER_AES_CBC;
64 a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CBC_256];
65 a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
66 a->alg = RTE_CRYPTO_CIPHER_AES_CBC;
71 a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CTR_128];
72 a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
73 a->alg = RTE_CRYPTO_CIPHER_AES_CTR;
74 a->boundary = 4; /* 1 */
78 a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CTR_192];
79 a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
80 a->alg = RTE_CRYPTO_CIPHER_AES_CTR;
81 a->boundary = 4; /* 1 */
85 a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_CTR_256];
86 a->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
87 a->alg = RTE_CRYPTO_CIPHER_AES_CTR;
88 a->boundary = 4; /* 1 */
/* Old DPDK exposes AES-GCM as a CIPHER xform, newer DPDK as an AEAD
 * xform; the #if/#else guarding these two pairs of defines is not
 * visible in this listing. */
93 #define AES_GCM_TYPE RTE_CRYPTO_SYM_XFORM_CIPHER
94 #define AES_GCM_ALG RTE_CRYPTO_CIPHER_AES_GCM
96 #define AES_GCM_TYPE RTE_CRYPTO_SYM_XFORM_AEAD
97 #define AES_GCM_ALG RTE_CRYPTO_AEAD_AES_GCM
100 a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_GCM_128];
101 a->type = AES_GCM_TYPE;
102 a->alg = AES_GCM_ALG;
103 a->boundary = 4; /* 1 */
108 a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_GCM_192];
109 a->type = AES_GCM_TYPE;
110 a->alg = AES_GCM_ALG;
111 a->boundary = 4; /* 1 */
116 a = &dcm->cipher_algs[IPSEC_CRYPTO_ALG_AES_GCM_256];
117 a->type = AES_GCM_TYPE;
118 a->alg = AES_GCM_ALG;
119 a->boundary = 4; /* 1 */
/* Same pattern for the integrity (auth) algorithm table. */
124 vec_validate (dcm->auth_algs, IPSEC_INTEG_N_ALG - 1);
128 dcm->auth_algs[IPSEC_INTEG_ALG_##f].name = str; \
129 dcm->auth_algs[IPSEC_INTEG_ALG_##f].disabled = n_mains;
130 foreach_ipsec_integ_alg
134 a = &dcm->auth_algs[IPSEC_INTEG_ALG_NONE];
135 a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
136 a->alg = RTE_CRYPTO_AUTH_NULL;
140 a = &dcm->auth_algs[IPSEC_INTEG_ALG_MD5_96];
141 a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
142 a->alg = RTE_CRYPTO_AUTH_MD5_HMAC;
146 a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA1_96];
147 a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
148 a->alg = RTE_CRYPTO_AUTH_SHA1_HMAC;
152 a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA_256_96];
153 a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
154 a->alg = RTE_CRYPTO_AUTH_SHA256_HMAC;
158 a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA_256_128];
159 a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
160 a->alg = RTE_CRYPTO_AUTH_SHA256_HMAC;
164 a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA_384_192];
165 a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
166 a->alg = RTE_CRYPTO_AUTH_SHA384_HMAC;
170 a = &dcm->auth_algs[IPSEC_INTEG_ALG_SHA_512_256];
171 a->type = RTE_CRYPTO_SYM_XFORM_AUTH;
172 a->alg = RTE_CRYPTO_AUTH_SHA512_HMAC;
/* Index of @alg within dcm->cipher_algs, by pointer arithmetic on the
 * vector base.  @alg must point into that vector. */
178 cipher_alg_index (const crypto_alg_t * alg)
180 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
182 return (alg - dcm->cipher_algs);
/* Index of @alg within dcm->auth_algs, by pointer arithmetic on the
 * vector base.  @alg must point into that vector. */
186 auth_alg_index (const crypto_alg_t * alg)
188 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
190 return (alg - dcm->auth_algs);
/* Match a device capability entry against our cipher table.  Returns the
 * crypto_alg_t whose (xform type, algo id, key length) all agree with the
 * capability; handles both CIPHER and AEAD capability kinds.  The
 * not-found return path is outside this listing's visible lines. */
193 static crypto_alg_t *
194 cipher_cap_to_alg (const struct rte_cryptodev_capabilities *cap, u8 key_len)
196 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
/* Only symmetric-crypto capabilities are relevant. */
199 if (cap->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC)
203 vec_foreach (alg, dcm->cipher_algs)
205 if ((cap->sym.xform_type == RTE_CRYPTO_SYM_XFORM_CIPHER) &&
206 (alg->type == RTE_CRYPTO_SYM_XFORM_CIPHER) &&
207 (cap->sym.cipher.algo == alg->alg) &&
208 (alg->key_len == key_len))
211 if ((cap->sym.xform_type == RTE_CRYPTO_SYM_XFORM_AEAD) &&
212 (alg->type == RTE_CRYPTO_SYM_XFORM_AEAD) &&
213 (cap->sym.aead.algo == alg->alg) &&
214 (alg->key_len == key_len))
/* Match a device AUTH capability against our auth table, keyed on the
 * algorithm id plus the digest truncation size. */
223 static crypto_alg_t *
224 auth_cap_to_alg (const struct rte_cryptodev_capabilities *cap, u8 trunc_size)
226 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
229 if ((cap->op != RTE_CRYPTO_OP_TYPE_SYMMETRIC) ||
230 (cap->sym.xform_type != RTE_CRYPTO_SYM_XFORM_AUTH))
234 vec_foreach (alg, dcm->auth_algs)
236 if ((cap->sym.auth.algo == alg->alg) &&
237 (alg->trunc_size == trunc_size))
/* Populate an AEAD (AES-GCM) xform from the SA: key from the SA, a 12-byte
 * IV stored in the crypto-op private area, digest length = the alg's
 * truncation size, and 12B of AAD when extended sequence numbers are in
 * use (8B otherwise).  Outbound SAs encrypt, inbound decrypt. */
247 crypto_set_aead_xform (struct rte_crypto_sym_xform *xform,
248 ipsec_sa_t * sa, u8 is_outbound)
250 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
253 c = vec_elt_at_index (dcm->cipher_algs, sa->crypto_alg);
255 ASSERT (c->type == RTE_CRYPTO_SYM_XFORM_AEAD);
257 xform->type = RTE_CRYPTO_SYM_XFORM_AEAD;
258 xform->aead.algo = c->alg;
259 xform->aead.key.data = sa->crypto_key;
260 xform->aead.key.length = c->key_len;
/* IV lives in the per-op private data (dpdk_op_priv_t.cb). */
261 xform->aead.iv.offset =
262 crypto_op_get_priv_offset () + offsetof (dpdk_op_priv_t, cb);
263 xform->aead.iv.length = 12;
264 xform->aead.digest_length = c->trunc_size;
/* AAD = SPI + seq (8B), plus seq-hi (4B) when ESN is enabled. */
265 xform->aead.aad_length = sa->use_esn ? 12 : 8;
269 xform->aead.op = RTE_CRYPTO_AEAD_OP_ENCRYPT;
271 xform->aead.op = RTE_CRYPTO_AEAD_OP_DECRYPT;
/* Populate a plain CIPHER xform from the SA: algorithm and key length come
 * from the cipher table, the IV sits in the crypto-op private area.
 * Outbound SAs encrypt, inbound decrypt. */
276 crypto_set_cipher_xform (struct rte_crypto_sym_xform *xform,
277 ipsec_sa_t * sa, u8 is_outbound)
279 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
282 c = vec_elt_at_index (dcm->cipher_algs, sa->crypto_alg);
284 ASSERT (c->type == RTE_CRYPTO_SYM_XFORM_CIPHER);
286 xform->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
287 xform->cipher.algo = c->alg;
288 xform->cipher.key.data = sa->crypto_key;
289 xform->cipher.key.length = c->key_len;
/* IV lives in the per-op private data (dpdk_op_priv_t.cb). */
291 xform->cipher.iv.offset =
292 crypto_op_get_priv_offset () + offsetof (dpdk_op_priv_t, cb);
293 xform->cipher.iv.length = c->iv_len;
298 xform->cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
300 xform->cipher.op = RTE_CRYPTO_CIPHER_OP_DECRYPT;
/* Populate an AUTH xform from the SA's integrity algorithm.  On older DPDK
 * (the pre-AEAD API; guarding #if lines not visible in this listing) an
 * AES-GCM SA overrides the auth algo and supplies the AAD length here. */
304 crypto_set_auth_xform (struct rte_crypto_sym_xform *xform,
305 ipsec_sa_t * sa, u8 is_outbound)
307 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
310 a = vec_elt_at_index (dcm->auth_algs, sa->integ_alg);
312 ASSERT (a->type == RTE_CRYPTO_SYM_XFORM_AUTH);
314 xform->type = RTE_CRYPTO_SYM_XFORM_AUTH;
315 xform->auth.algo = a->alg;
316 xform->auth.key.data = sa->integ_key;
317 xform->auth.key.length = a->key_len;
318 xform->auth.digest_length = a->trunc_size;
/* NOTE(review): bitwise `|` where `||` is conventional.  Harmless here —
 * each `==` yields 0 or 1 and `==` binds tighter than `|` — but the
 * logical operator would be clearer; compare the parenthesized form in
 * create_sym_session(). */
320 if (sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_128 |
321 sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_192 |
322 sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_256)
323 xform->auth.algo = RTE_CRYPTO_AUTH_AES_GCM;
/* AAD = SPI + seq (8B), plus seq-hi (4B) when ESN is enabled. */
324 xform->auth.add_auth_data_length = sa->use_esn ? 12 : 8;
/* Newer-API path: IV offset computed directly past op + sym_op headers. */
327 xform->auth.iv.offset =
328 sizeof (struct rte_crypto_op) + sizeof (struct rte_crypto_sym_op) +
329 offsetof (dpdk_op_priv_t, cb);
330 xform->auth.iv.length = a->iv_len;
336 xform->auth.op = RTE_CRYPTO_AUTH_OP_GENERATE;
338 xform->auth.op = RTE_CRYPTO_AUTH_OP_VERIFY;
/* Create (or look up) a DPDK symmetric session for an SA on the given
 * crypto resource, and cache it in cwm->session_by_drv_id_and_sa_index
 * keyed by (driver id, SA index).  Two API generations coexist here,
 * separated by #if lines not visible in this listing:
 *  - old API: one session per worker per device, created directly;
 *  - new API: one shared session header per SA (data->session_by_sa_index),
 *    initialized per device driver from a per-driver mempool.
 * Returns a clib_error_t on allocation/initialization failure. */
342 create_sym_session (struct rte_cryptodev_sym_session **session,
344 crypto_resource_t * res,
345 crypto_worker_main_t * cwm, u8 is_outbound)
347 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
348 ipsec_main_t *im = &ipsec_main;
351 struct rte_crypto_sym_xform cipher_xform = { 0 };
352 struct rte_crypto_sym_xform auth_xform = { 0 };
353 struct rte_crypto_sym_xform *xfs;
354 crypto_session_key_t key = { 0 };
356 key.drv_id = res->drv_id;
359 sa = pool_elt_at_index (im->sad, sa_idx);
/* AES-GCM uses a single AEAD xform; everything else chains cipher+auth. */
362 if ((sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_128) |
363 (sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_192) |
364 (sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_256))
366 crypto_set_aead_xform (&cipher_xform, sa, is_outbound);
370 #endif /* ! DPDK_NO_AEAD */
372 crypto_set_cipher_xform (&cipher_xform, sa, is_outbound);
373 crypto_set_auth_xform (&auth_xform, sa, is_outbound);
/* Outbound: encrypt then auth; inbound: auth then decrypt (order set by
 * which xform heads the chain). */
377 cipher_xform.next = &auth_xform;
382 auth_xform.next = &cipher_xform;
387 data = vec_elt_at_index (dcm->data, res->numa);
392 * Each worker/thread has its own session per device driver
394 session[0] = rte_cryptodev_sym_session_create (res->dev_id, xfs);
397 data->session_drv_failed[res->drv_id] += 1;
398 return clib_error_return (0, "failed to create session for dev %u",
404 * Multiple worker/threads share the session for an SA
405 * Single session per SA, initialized for each device driver
407 session[0] = (void *) hash_get (data->session_by_sa_index, sa_idx);
411 session[0] = rte_cryptodev_sym_session_create (data->session_h);
414 data->session_h_failed += 1;
415 return clib_error_return (0, "failed to create session header");
417 hash_set (data->session_by_sa_index, sa_idx, session[0]);
420 struct rte_mempool **mp;
421 mp = vec_elt_at_index (data->session_drv, res->drv_id);
422 ASSERT (mp[0] != NULL);
425 rte_cryptodev_sym_session_init (res->dev_id, session[0], xfs, mp[0]);
428 data->session_drv_failed[res->drv_id] += 1;
429 return clib_error_return (0, "failed to init session for drv %u",
432 #endif /* DPDK_NO_AEAD */
/* Cache under (drv_id, sa_idx) so the data path finds it without locks. */
434 hash_set (cwm->session_by_drv_id_and_sa_index, key.val, session[0]);
/* Zero a mempool element (it may hold key material) and return it to its
 * owning pool.  NOTE(review): plain memset of secrets can in principle be
 * elided by the optimizer; acceptable here since the object stays live in
 * the pool — confirm if hardening is required. */
439 static void __attribute__ ((unused)) clear_and_free_obj (void *obj)
441 struct rte_mempool *mp = rte_mempool_from_obj (obj);
443 memset (obj, 0, mp->elt_size);
445 rte_mempool_put (mp, obj);
449 /* This is from rte_cryptodev_pmd.h */
/* Fetch the per-driver private data slot of a shared session header
 * (local copy of the PMD-internal accessor, which is not public API). */
451 get_session_private_data (const struct rte_cryptodev_sym_session *sess,
454 return sess->sess_private_data[driver_id];
457 /* This is from rte_cryptodev_pmd.h */
/* Store/clear the per-driver private data slot of a shared session header
 * (local copy of the PMD-internal accessor, which is not public API). */
459 set_session_private_data (struct rte_cryptodev_sym_session *sess,
460 uint8_t driver_id, void *private_data)
462 sess->sess_private_data[driver_id] = private_data;
/* ipsec add_del_sa_sess callback.  On add: derive the salt for AES-GCM
 * SAs from the last 4 bytes of the key, or randomize it otherwise.  On
 * delete: tear down every cached session for the SA — per-worker entries
 * in session_by_drv_id_and_sa_index, then (new-API path, #if lines not
 * visible here) the shared per-SA session header and its per-driver
 * private data. */
466 static clib_error_t *
467 add_del_sa_session (u32 sa_index, u8 is_add)
469 ipsec_main_t *im = &ipsec_main;
470 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
471 crypto_worker_main_t *cwm;
472 struct rte_cryptodev_sym_session *s;
473 crypto_session_key_t key = { 0 };
478 key.sa_idx = sa_index;
483 ipsec_sa_t *sa = pool_elt_at_index (im->sad, sa_index);
485 switch (sa->crypto_alg)
487 case IPSEC_CRYPTO_ALG_AES_GCM_128:
488 case IPSEC_CRYPTO_ALG_AES_GCM_192:
489 case IPSEC_CRYPTO_ALG_AES_GCM_256:
/* GCM salt = last 4 bytes of the configured key (RFC 4106 convention). */
490 clib_memcpy (&sa->salt, &sa->crypto_key[sa->crypto_key_len - 4], 4);
/* Non-GCM: random salt; cycle counter is only a convenience seed. */
493 seed = (u32) clib_cpu_time_now ();
494 sa->salt = random_u32 (&seed);
500 /* XXX Wait N cycles to be sure session is not in use OR
501 * keep refcnt at SA level per worker/thread ? */
/* Drop every (drv_id, sa_index) cache entry across all workers. */
505 vec_foreach (cwm, dcm->workers_main)
507 for (drv_id = 0; drv_id < dcm->max_drv_id; drv_id++)
510 val = hash_get (cwm->session_by_drv_id_and_sa_index, key.val);
511 s = (struct rte_cryptodev_sym_session *) val;
517 ret = (rte_cryptodev_sym_session_free (s->dev_id, s) == NULL);
520 hash_unset (cwm->session_by_drv_id_and_sa_index, key.val);
/* New-API path: free the shared session header per NUMA data. */
528 vec_foreach (data, dcm->data)
530 val = hash_get (data->session_by_sa_index, sa_index);
531 s = (struct rte_cryptodev_sym_session *) val;
536 hash_unset (data->session_by_sa_index, sa_index);
539 vec_foreach_index (drv_id, dcm->drv)
541 drv_session = get_session_private_data (s, drv_id);
546 * Custom clear to avoid finding a dev_id for drv_id:
547 * ret = rte_cryptodev_sym_session_clear (dev_id, drv_session);
550 clear_and_free_obj (drv_session);
552 set_session_private_data (s, drv_id, NULL);
555 ret = rte_cryptodev_sym_session_free(s);
/* ipsec check_support callback.  Rejects SAs whose algorithm combination
 * the DPDK backend cannot handle: integ NONE is only valid with AES-GCM,
 * and any algorithm still marked disabled (no crypto resource assigned on
 * some thread) is refused.  Returns NULL when the SA is supported. */
564 static clib_error_t *
565 dpdk_ipsec_check_support (ipsec_sa_t * sa)
567 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
/* integ NONE is acceptable only for the AEAD (GCM) ciphers. */
569 if (sa->integ_alg == IPSEC_INTEG_ALG_NONE)
570 switch (sa->crypto_alg)
572 case IPSEC_CRYPTO_ALG_AES_GCM_128:
573 case IPSEC_CRYPTO_ALG_AES_GCM_192:
574 case IPSEC_CRYPTO_ALG_AES_GCM_256:
577 return clib_error_return (0, "unsupported integ-alg %U crypto-alg %U",
578 format_ipsec_integ_alg, sa->integ_alg,
579 format_ipsec_crypto_alg, sa->crypto_alg);
582 /* XXX do we need the NONE check? */
583 if (sa->crypto_alg != IPSEC_CRYPTO_ALG_NONE &&
584 dcm->cipher_algs[sa->crypto_alg].disabled)
585 return clib_error_return (0, "disabled crypto-alg %U",
586 format_ipsec_crypto_alg, sa->crypto_alg);
588 /* XXX do we need the NONE check? */
589 if (sa->integ_alg != IPSEC_INTEG_ALG_NONE &&
590 dcm->auth_algs[sa->integ_alg].disabled)
591 return clib_error_return (0, "disabled integ-alg %U",
592 format_ipsec_integ_alg, sa->integ_alg);
/* Walk a device's capability array (terminated by OP_TYPE_UNDEFINED) and
 * mark which of our cipher/auth algorithms it supports.  Each supported
 * algorithm accrues the device's resource count; dcm->enabled latches on
 * once any algorithm has at least n_mains resources available. */
597 crypto_parse_capabilities (crypto_dev_t * dev,
598 const struct rte_cryptodev_capabilities *cap,
601 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
605 for (; cap->op != RTE_CRYPTO_OP_TYPE_UNDEFINED; cap++)
607 /* A single capability maps to multiple cipher/auth algorithms */
608 switch (cap->sym.xform_type)
/* AEAD falls through into the cipher handling (its key-size range is
 * read via the cipher view of the union). */
611 case RTE_CRYPTO_SYM_XFORM_AEAD:
613 case RTE_CRYPTO_SYM_XFORM_CIPHER:
614 inc = cap->sym.cipher.key_size.increment;
/* Enumerate every key length the capability range advertises. */
616 for (len = cap->sym.cipher.key_size.min;
617 len <= cap->sym.cipher.key_size.max; len += inc)
619 alg = cipher_cap_to_alg (cap, len);
622 dev->cipher_support[cipher_alg_index (alg)] = 1;
623 alg->resources += vec_len (dev->free_resources);
624 /* At least enough resources to support one algo */
625 dcm->enabled |= (alg->resources >= n_mains);
628 case RTE_CRYPTO_SYM_XFORM_AUTH:
629 inc = cap->sym.auth.digest_size.increment;
/* Enumerate every digest (truncation) size in the advertised range. */
631 for (len = cap->sym.auth.digest_size.min;
632 len <= cap->sym.auth.digest_size.max; len += inc)
634 alg = auth_cap_to_alg (cap, len);
637 dev->auth_support[auth_alg_index (alg)] = 1;
638 alg->resources += vec_len (dev->free_resources);
639 /* At least enough resources to support one algo */
640 dcm->enabled |= (alg->resources >= n_mains);
649 #define DPDK_CRYPTO_N_QUEUE_DESC 2048
650 #define DPDK_CRYPTO_NB_SESS_OBJS 20000
/* Configure crypto device @dev with n_qp queue pairs on NUMA node @numa,
 * then set up each queue pair with DPDK_CRYPTO_N_QUEUE_DESC descriptors.
 * The two queue_pair_setup variants belong to different DPDK versions;
 * the #if selecting between them is not visible in this listing.
 * Returns a clib_error_t on any DPDK failure. */
652 static clib_error_t *
653 crypto_dev_conf (u8 dev, u16 n_qp, u8 numa)
655 struct rte_cryptodev_config dev_conf;
656 struct rte_cryptodev_qp_conf qp_conf;
661 dev_conf.socket_id = numa;
662 dev_conf.nb_queue_pairs = n_qp;
664 dev_conf.session_mp.nb_objs = DPDK_CRYPTO_NB_SESS_OBJS;
665 dev_conf.session_mp.cache_size = 512;
668 error_str = "failed to configure crypto device %u";
669 ret = rte_cryptodev_configure (dev, &dev_conf);
671 return clib_error_return (0, error_str, dev);
673 error_str = "failed to setup crypto device %u queue pair %u";
674 qp_conf.nb_descriptors = DPDK_CRYPTO_N_QUEUE_DESC;
675 for (qp = 0; qp < n_qp; qp++)
678 ret = rte_cryptodev_queue_pair_setup (dev, qp, &qp_conf, numa);
680 ret = rte_cryptodev_queue_pair_setup (dev, qp, &qp_conf, numa, NULL);
683 return clib_error_return (0, error_str, dev, qp);
/* Enumerate all DPDK crypto devices: record name/NUMA/features/queue count
 * per device, group devices by driver id, skip devices without symmetric
 * operation chaining, configure each usable device, and carve its queue
 * pairs into crypto_resource_t entries (one resource per 2 queue pairs —
 * one inbound + one outbound).  Finally parse capabilities to learn which
 * algorithms each device supports. */
690 crypto_scan_devs (u32 n_mains)
692 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
693 struct rte_cryptodev *cryptodev;
694 struct rte_cryptodev_info info;
696 crypto_resource_t *res;
699 u16 max_res_idx, res_idx, j;
702 vec_validate_init_empty (dcm->dev, rte_cryptodev_count () - 1,
703 (crypto_dev_t) EMPTY_STRUCT);
705 for (i = 0; i < rte_cryptodev_count (); i++)
707 dev = vec_elt_at_index (dcm->dev, i);
709 cryptodev = &rte_cryptodevs[i];
710 rte_cryptodev_info_get (i, &info);
713 dev->name = cryptodev->data->name;
714 dev->numa = rte_cryptodev_socket_id (i);
715 dev->features = info.feature_flags;
716 dev->max_qp = info.max_nb_queue_pairs;
/* Driver id field moved between DPDK versions (dev_type vs driver_id);
 * the #if selecting the line is not visible in this listing. */
718 drv_id = cryptodev->dev_type;
720 drv_id = info.driver_id;
722 if (drv_id >= vec_len (dcm->drv))
723 vec_validate_init_empty (dcm->drv, drv_id,
724 (crypto_drv_t) EMPTY_STRUCT);
725 vec_elt_at_index (dcm->drv, drv_id)->name = info.driver_name;
726 dev->drv_id = drv_id;
727 vec_add1 (vec_elt_at_index (dcm->drv, drv_id)->devs, i);
/* The data path chains cipher+auth in one op; chaining is mandatory. */
729 if (!(info.feature_flags & RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING))
732 if ((error = crypto_dev_conf (i, dev->max_qp, dev->numa)))
734 clib_error_report (error);
/* One resource per queue-pair pair (inbound + outbound). */
738 max_res_idx = (dev->max_qp / 2) - 1;
740 vec_validate (dev->free_resources, max_res_idx);
742 res_idx = vec_len (dcm->resource);
743 vec_validate_init_empty_aligned (dcm->resource, res_idx + max_res_idx,
744 (crypto_resource_t) EMPTY_STRUCT,
745 CLIB_CACHE_LINE_BYTES);
/* Fill free_resources in reverse so vec_pop hands out low indices first. */
747 for (j = 0; j <= max_res_idx; j++, res_idx++)
749 vec_elt (dev->free_resources, max_res_idx - j) = res_idx;
750 res = &dcm->resource[res_idx];
752 res->drv_id = drv_id;
754 res->numa = dev->numa;
755 res->thread_idx = (u16) ~ 0;
758 crypto_parse_capabilities (dev, info.capabilities, n_mains);
/* Assign crypto resources to worker threads.  For each device, hand at
 * most one resource to each thread that does not already use the device;
 * the master thread is skipped when workers exist.  A thread adopts a
 * resource only for algorithms it does not yet have a resource for, and
 * each adoption decrements the algorithm's disabled counter (it reaches 0
 * once every thread is covered).  Unused resources are returned to the
 * device's free list. */
763 crypto_auto_placement (void)
765 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
766 crypto_resource_t *res;
767 crypto_worker_main_t *cwm;
769 u32 thread_idx, skip_master;
774 skip_master = vlib_num_workers () > 0;
777 vec_foreach (dev, dcm->dev)
779 vec_foreach_index (thread_idx, dcm->workers_main)
781 if (vec_len (dev->free_resources) == 0)
784 if (thread_idx < skip_master)
787 /* Check thread is not already using the device */
788 vec_foreach (idx, dev->used_resources)
789 if (dcm->resource[idx[0]].thread_idx == thread_idx)
792 cwm = vec_elt_at_index (dcm->workers_main, thread_idx);
795 res_idx = vec_pop (dev->free_resources);
797 /* Set device only for supported algos */
798 for (i = 0; i < IPSEC_CRYPTO_N_ALG; i++)
799 if (dev->cipher_support[i] &&
800 cwm->cipher_resource_idx[i] == (u16) ~0)
802 dcm->cipher_algs[i].disabled--;
803 cwm->cipher_resource_idx[i] = res_idx;
807 for (i = 0; i < IPSEC_INTEG_N_ALG; i++)
808 if (dev->auth_support[i] &&
809 cwm->auth_resource_idx[i] == (u16) ~0)
811 dcm->auth_algs[i].disabled--;
812 cwm->auth_resource_idx[i] = res_idx;
/* Resource not adopted for any algorithm: return it to the free list. */
818 vec_add1 (dev->free_resources, res_idx);
822 vec_add1 (dev->used_resources, res_idx);
824 res = vec_elt_at_index (dcm->resource, res_idx);
826 ASSERT (res->thread_idx == (u16) ~0);
827 res->thread_idx = thread_idx;
829 /* Add device to vector of polling resources */
830 vec_add1 (cwm->resource_idx, res_idx);
/* rte_mempool per-object constructor for crypto ops: set constant fields
 * once at pool-creation time so the data path never re-initializes them.
 * The sym-op pointer / sess_type lines differ between DPDK API versions;
 * the #if selecting them is not visible in this listing. */
837 crypto_op_init (struct rte_mempool *mempool,
838 void *_arg __attribute__ ((unused)),
839 void *_obj, unsigned i __attribute__ ((unused)))
841 struct rte_crypto_op *op = _obj;
/* Symmetric op struct is laid out immediately after the op header. */
844 op->sym = (struct rte_crypto_sym_op *) (op + 1);
845 op->sym->sess_type = RTE_CRYPTO_SYM_OP_WITH_SESSION;
847 op->sess_type = RTE_CRYPTO_OP_WITH_SESSION;
849 op->type = RTE_CRYPTO_OP_TYPE_SYMMETRIC;
850 op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;
851 op->phys_addr = rte_mem_virt2phy (_obj);
852 op->mempool = mempool;
/* Create the per-NUMA crypto-op mempool (one op per mbuf, initialized by
 * crypto_op_init) and stamp the pool's private area with the op type so
 * DPDK PMDs can validate it.  Idempotent per NUMA node via dcm->data. */
855 static clib_error_t *
856 crypto_create_crypto_op_pool (u8 numa)
858 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
859 dpdk_config_main_t *conf = &dpdk_config_main;
862 u32 pool_priv_size = sizeof (struct rte_crypto_op_pool_private);
863 struct rte_crypto_op_pool_private *priv;
864 clib_error_t *error = NULL;
866 data = vec_elt_at_index (dcm->data, numa);
871 pool_name = format (0, "crypto_pool_numa%u%c", numa, 0);
874 rte_mempool_create ((i8 *) pool_name, conf->num_mbufs, crypto_op_len (),
875 512, pool_priv_size, NULL, NULL, crypto_op_init, NULL,
878 if (!data->crypto_op)
880 error = clib_error_return (0, "failed to allocate %s", pool_name);
/* Mirror what rte_crypto_op_pool_create() would have set. */
884 priv = rte_mempool_get_priv (data->crypto_op);
886 priv->priv_size = pool_priv_size;
887 priv->type = RTE_CRYPTO_OP_TYPE_SYMMETRIC;
890 vec_free (pool_name);
/* Create the per-NUMA pool of session *headers* (new session API: the
 * header is driver-independent; per-driver private data lives in separate
 * pools — see crypto_create_session_drv_pool). */
895 static clib_error_t *
896 crypto_create_session_h_pool (u8 numa)
901 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
905 clib_error_t *error = NULL;
907 data = vec_elt_at_index (dcm->data, numa);
912 pool_name = format (0, "session_h_pool_numa%u%c", numa, 0);
913 elt_size = rte_cryptodev_get_header_session_size ();
916 rte_mempool_create ((i8 *) pool_name, DPDK_CRYPTO_NB_SESS_OBJS, elt_size,
917 512, 0, NULL, NULL, NULL, NULL, numa, 0);
919 if (!data->session_h)
920 error = clib_error_return (0, "failed to allocate %s", pool_name);
922 vec_free (pool_name);
/* Create the per-(NUMA, driver) pool of driver-private session data, sized
 * by the driver's reported private session size.  No-op if the pool for
 * this driver already exists on the device's NUMA node. */
928 static clib_error_t *
929 crypto_create_session_drv_pool (crypto_dev_t * dev)
934 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
938 clib_error_t *error = NULL;
941 data = vec_elt_at_index (dcm->data, numa);
943 vec_validate (data->session_drv, dev->drv_id);
944 vec_validate (data->session_drv_failed, dev->drv_id);
946 if (data->session_drv[dev->drv_id])
949 pool_name = format (0, "session_drv%u_pool_numa%u%c", dev->drv_id, numa, 0);
950 elt_size = rte_cryptodev_get_private_session_size (dev->id);
952 data->session_drv[dev->drv_id] =
953 rte_mempool_create ((i8 *) pool_name, DPDK_CRYPTO_NB_SESS_OBJS, elt_size,
954 512, 0, NULL, NULL, NULL, NULL, numa, 0);
956 if (!data->session_drv[dev->drv_id])
957 error = clib_error_return (0, "failed to allocate %s", pool_name);
959 vec_free (pool_name);
/* For every scanned device, make sure its NUMA node has a crypto-op pool,
 * a session-header pool, and a session pool for the device's driver.
 * Propagates the first creation error. */
965 static clib_error_t *
966 crypto_create_pools (void)
968 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
969 clib_error_t *error = NULL;
973 vec_foreach (dev, dcm->dev)
975 vec_validate (dcm->data, dev->numa);
977 error = crypto_create_crypto_op_pool (dev->numa);
981 error = crypto_create_session_h_pool (dev->numa);
985 error = crypto_create_session_drv_pool (dev);
/* Tear down all backend state when DPDK crypto cannot be enabled: free
 * every mempool, then release all of dcm's vectors so the plugin leaves
 * no allocations behind (rte_mempool_free tolerates NULL). */
995 crypto_disable (void)
997 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
1004 vec_foreach (data, dcm->data)
1006 rte_mempool_free (data->crypto_op);
1007 rte_mempool_free (data->session_h);
1009 vec_foreach_index (i, data->session_drv)
1010 rte_mempool_free (data->session_drv[i]);
1012 vec_free (data->session_drv);
1016 vec_free (dcm->data);
1018 vec_free (dcm->workers_main);
1019 vec_free (dcm->sa_session);
1020 vec_free (dcm->dev);
1021 vec_free (dcm->resource);
1022 vec_free (dcm->cipher_algs);
1023 vec_free (dcm->resource);
/* One-shot VLIB process: initialize the DPDK IPsec backend.  Builds the
 * algorithm tables, scans/configures crypto devices, places resources on
 * worker threads, creates the mempools, splices the dpdk-esp-encrypt/
 * decrypt nodes into the IPsec graph, registers the support/session
 * callbacks, and finally starts dpdk-crypto-input polling on every
 * worker.  Falls back to the OpenSSL backend when no usable DPDK crypto
 * resources were found. */
1027 dpdk_ipsec_process (vlib_main_t * vm, vlib_node_runtime_t * rt,
1030 ipsec_main_t *im = &ipsec_main;
1031 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
1032 vlib_thread_main_t *tm = vlib_get_thread_main ();
1033 crypto_worker_main_t *cwm;
1034 clib_error_t *error = NULL;
1035 u32 i, skip_master, n_mains;
1037 n_mains = tm->n_vlib_mains;
/* When workers exist, the master thread does not run the data path. */
1038 skip_master = vlib_num_workers () > 0;
1040 algos_init (n_mains - skip_master);
1042 crypto_scan_devs (n_mains - skip_master);
1044 if (!(dcm->enabled))
1046 clib_warning ("not enough DPDK crypto resources, default to OpenSSL");
1051 vec_validate_init_empty (dcm->workers_main, n_mains - 1,
1052 (crypto_worker_main_t) EMPTY_STRUCT);
/* ~0 == "no resource assigned yet" sentinel used by placement. */
1055 vec_foreach (cwm, dcm->workers_main)
1057 memset (cwm->cipher_resource_idx, ~0,
1058 IPSEC_CRYPTO_N_ALG * sizeof(*cwm->cipher_resource_idx));
1059 memset (cwm->auth_resource_idx, ~0,
1060 IPSEC_INTEG_N_ALG * sizeof(*cwm->auth_resource_idx));
1064 crypto_auto_placement ();
1066 error = crypto_create_pools ();
1069 clib_error_report (error);
1074 /* Add new next node and set it as default */
1075 vlib_node_t *node, *next_node;
1077 next_node = vlib_get_node_by_name (vm, (u8 *) "dpdk-esp-encrypt");
1079 node = vlib_get_node_by_name (vm, (u8 *) "ipsec-output-ip4");
1081 im->esp_encrypt_node_index = next_node->index;
1082 im->esp_encrypt_next_index =
1083 vlib_node_add_next (vm, node->index, next_node->index);
1085 next_node = vlib_get_node_by_name (vm, (u8 *) "dpdk-esp-decrypt");
1087 node = vlib_get_node_by_name (vm, (u8 *) "ipsec-input-ip4");
1089 im->esp_decrypt_node_index = next_node->index;
1090 im->esp_decrypt_next_index =
1091 vlib_node_add_next (vm, node->index, next_node->index);
/* Route SA validation and session lifecycle through this backend. */
1093 im->cb.check_support_cb = dpdk_ipsec_check_support;
1094 im->cb.add_del_sa_sess_cb = add_del_sa_session;
1096 node = vlib_get_node_by_name (vm, (u8 *) "dpdk-crypto-input");
1098 for (i = skip_master; i < n_mains; i++)
1099 vlib_node_set_state (vlib_mains[i], node->index, VLIB_NODE_STATE_POLLING);
/* Register dpdk_ipsec_process as a VLIB process node; 2^17 = 128KiB of
 * process stack for the one-time initialization work it performs. */
1104 VLIB_REGISTER_NODE (dpdk_ipsec_process_node,static) = {
1105 .function = dpdk_ipsec_process,
1106 .type = VLIB_NODE_TYPE_PROCESS,
1107 .name = "dpdk-ipsec-process",
1108 .process_log2_n_stack_bytes = 17,
1113 * fd.io coding-style-patch-verification: ON
1116 * eval: (c-set-style "gnu")