/*
 * Copyright (c) 2016 Intel and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef __DPDK_ESP_H__
#define __DPDK_ESP_H__

#include <vnet/devices/dpdk/ipsec/ipsec.h>
#include <vnet/ipsec/ipsec.h>
#include <vnet/ipsec/esp.h>
24 enum rte_crypto_cipher_algorithm algo;
27 } dpdk_esp_crypto_alg_t;
31 enum rte_crypto_auth_algorithm algo;
33 } dpdk_esp_integ_alg_t;
37 dpdk_esp_crypto_alg_t *esp_crypto_algs;
38 dpdk_esp_integ_alg_t *esp_integ_algs;
41 dpdk_esp_main_t dpdk_esp_main;
43 static_always_inline void
46 dpdk_esp_main_t *em = &dpdk_esp_main;
47 dpdk_esp_integ_alg_t *i;
48 dpdk_esp_crypto_alg_t *c;
50 vec_validate (em->esp_crypto_algs, IPSEC_CRYPTO_N_ALG - 1);
52 c = &em->esp_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_128];
53 c->algo = RTE_CRYPTO_CIPHER_AES_CBC;
57 c = &em->esp_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_192];
58 c->algo = RTE_CRYPTO_CIPHER_AES_CBC;
62 c = &em->esp_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_256];
63 c->algo = RTE_CRYPTO_CIPHER_AES_CBC;
67 vec_validate (em->esp_integ_algs, IPSEC_INTEG_N_ALG - 1);
69 i = &em->esp_integ_algs[IPSEC_INTEG_ALG_SHA1_96];
70 i->algo = RTE_CRYPTO_AUTH_SHA1_HMAC;
73 i = &em->esp_integ_algs[IPSEC_INTEG_ALG_SHA_256_96];
74 i->algo = RTE_CRYPTO_AUTH_SHA256_HMAC;
77 i = &em->esp_integ_algs[IPSEC_INTEG_ALG_SHA_256_128];
78 i->algo = RTE_CRYPTO_AUTH_SHA256_HMAC;
81 i = &em->esp_integ_algs[IPSEC_INTEG_ALG_SHA_384_192];
82 i->algo = RTE_CRYPTO_AUTH_SHA384_HMAC;
85 i = &em->esp_integ_algs[IPSEC_INTEG_ALG_SHA_512_256];
86 i->algo = RTE_CRYPTO_AUTH_SHA512_HMAC;
90 static_always_inline int
91 add_del_sa_sess (u32 sa_index, u8 is_add)
93 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
94 crypto_worker_main_t *cwm;
95 u8 skip_master = vlib_num_workers () > 0;
98 vec_foreach (cwm, dcm->workers_main)
100 crypto_sa_session_t *sa_sess;
109 for (is_outbound = 0; is_outbound < 2; is_outbound++)
113 pool_get (cwm->sa_sess_d[is_outbound], sa_sess);
119 sa_sess = pool_elt_at_index (cwm->sa_sess_d[is_outbound], sa_index);
120 dev_id = cwm->qp_data[sa_sess->qp_index].dev_id;
125 if (rte_cryptodev_sym_session_free(dev_id, sa_sess->sess))
127 clib_warning("failed to free session");
130 memset(sa_sess, 0, sizeof(sa_sess[0]));
139 static_always_inline int
140 translate_crypto_algo(ipsec_crypto_alg_t crypto_algo,
141 struct rte_crypto_sym_xform *cipher_xform)
145 case IPSEC_CRYPTO_ALG_NONE:
146 cipher_xform->cipher.algo = RTE_CRYPTO_CIPHER_NULL;
148 case IPSEC_CRYPTO_ALG_AES_CBC_128:
149 case IPSEC_CRYPTO_ALG_AES_CBC_192:
150 case IPSEC_CRYPTO_ALG_AES_CBC_256:
151 cipher_xform->cipher.algo = RTE_CRYPTO_CIPHER_AES_CBC;
157 cipher_xform->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
162 static_always_inline int
163 translate_integ_algo(ipsec_integ_alg_t integ_alg,
164 struct rte_crypto_sym_xform *auth_xform)
167 case IPSEC_INTEG_ALG_NONE:
168 auth_xform->auth.algo = RTE_CRYPTO_AUTH_NULL;
169 auth_xform->auth.digest_length = 0;
171 case IPSEC_INTEG_ALG_SHA1_96:
172 auth_xform->auth.algo = RTE_CRYPTO_AUTH_SHA1_HMAC;
173 auth_xform->auth.digest_length = 12;
175 case IPSEC_INTEG_ALG_SHA_256_96:
176 auth_xform->auth.algo = RTE_CRYPTO_AUTH_SHA256_HMAC;
177 auth_xform->auth.digest_length = 12;
179 case IPSEC_INTEG_ALG_SHA_256_128:
180 auth_xform->auth.algo = RTE_CRYPTO_AUTH_SHA256_HMAC;
181 auth_xform->auth.digest_length = 16;
183 case IPSEC_INTEG_ALG_SHA_384_192:
184 auth_xform->auth.algo = RTE_CRYPTO_AUTH_SHA384_HMAC;
185 auth_xform->auth.digest_length = 24;
187 case IPSEC_INTEG_ALG_SHA_512_256:
188 auth_xform->auth.algo = RTE_CRYPTO_AUTH_SHA512_HMAC;
189 auth_xform->auth.digest_length = 32;
195 auth_xform->type = RTE_CRYPTO_SYM_XFORM_AUTH;
200 static_always_inline int
201 create_sym_sess(ipsec_sa_t *sa, crypto_sa_session_t *sa_sess, u8 is_outbound)
203 u32 cpu_index = os_get_cpu_number();
204 dpdk_crypto_main_t * dcm = &dpdk_crypto_main;
205 crypto_worker_main_t *cwm = &dcm->workers_main[cpu_index];
206 struct rte_crypto_sym_xform cipher_xform = {0};
207 struct rte_crypto_sym_xform auth_xform = {0};
208 struct rte_crypto_sym_xform *xfs;
209 uword key = 0, *data;
210 crypto_worker_qp_key_t *p_key = (crypto_worker_qp_key_t *)&key;
212 cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
213 cipher_xform.cipher.key.data = sa->crypto_key;
214 cipher_xform.cipher.key.length = sa->crypto_key_len;
216 auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
217 auth_xform.auth.key.data = sa->integ_key;
218 auth_xform.auth.key.length = sa->integ_key_len;
220 if (translate_crypto_algo(sa->crypto_alg, &cipher_xform) < 0)
222 p_key->cipher_algo = cipher_xform.cipher.algo;
224 if (translate_integ_algo(sa->integ_alg, &auth_xform) < 0)
226 p_key->auth_algo = auth_xform.auth.algo;
230 cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
231 auth_xform.auth.op = RTE_CRYPTO_AUTH_OP_GENERATE;
232 cipher_xform.next = &auth_xform;
237 cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_DECRYPT;
238 auth_xform.auth.op = RTE_CRYPTO_AUTH_OP_VERIFY;
239 auth_xform.next = &cipher_xform;
243 p_key->is_outbound = is_outbound;
245 data = hash_get(cwm->algo_qp_map, key);
250 rte_cryptodev_sym_session_create(cwm->qp_data[*data].dev_id, xfs);
255 sa_sess->qp_index = (u8)*data;
#endif /* __DPDK_ESP_H__ */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */