2 * Copyright (c) 2017 Intel and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
15 #ifndef __DPDK_IPSEC_H__
16 #define __DPDK_IPSEC_H__
18 #include <vnet/vnet.h>
19 #include <vppinfra/cache.h>
20 #include <vnet/ipsec/ipsec.h>
23 #include <rte_config.h>
24 #include <rte_crypto.h>
25 #include <rte_cryptodev.h>
/* Local override of always_inline.
 * NOTE(review): in the full file these two definitions are almost certainly
 * wrapped in a debug conditional (e.g. "#if CLIB_DEBUG > 0" / "#else" /
 * "#endif") whose preprocessor lines are missing from this excerpt; as shown
 * the second #define would emit a macro-redefinition warning — confirm
 * against the unexcerpted source. */
#define always_inline static inline

#define always_inline static inline __attribute__ ((__always_inline__))
/* X-macro list of next-node dispositions reachable from the
 * dpdk-crypto-input graph node.  Each entry is _(enum-suffix, "node-name");
 * it is expanded immediately below to generate dpdk_crypto_input_next_t. */
#define foreach_dpdk_crypto_input_next \
_(DROP, "error-drop") \
_(IP4_LOOKUP, "ip4-lookup") \
_(IP6_LOOKUP, "ip6-lookup") \
_(INTERFACE_OUTPUT, "interface-output") \
_(DECRYPT_POST, "dpdk-esp-decrypt-post")
/* Next-node index enum generated from the X-macro list above; the last
 * member is the count of next nodes.
 * NOTE(review): the opening "typedef enum {" and the trailing "#undef _"
 * are missing from this excerpt. */
#define _(f,s) DPDK_CRYPTO_INPUT_NEXT_##f,
  foreach_dpdk_crypto_input_next
    DPDK_CRYPTO_INPUT_N_NEXT,
} dpdk_crypto_input_next_t;
/* Upper bound on cryptodev queue pairs a single lcore may own. */
#define MAX_QP_PER_LCORE 16

/* Tail of the per-op private data struct dpdk_op_priv_t; its member
 * declarations are missing from this excerpt.  The 16-byte alignment matches
 * the 16-byte rounding used by crypto_op_get_priv_offset() below. */
} dpdk_op_priv_t __attribute__ ((aligned (16)));
/* Tail of the per-worker-thread crypto state struct; earlier members are
 * missing from this excerpt. */
  /* hash: 64-bit (driver id, SA index) key -> cached cryptodev session,
   * consulted by crypto_get_session() below */
  uword *session_by_drv_id_and_sa_index;
  /* per-algorithm index into dcm->resource, see get_resource() below */
  u16 cipher_resource_idx[IPSEC_CRYPTO_N_ALG];
  u16 auth_resource_idx[IPSEC_INTEG_N_ALG];
  /* scratch array: up to one vlib frame's worth of crypto ops */
  struct rte_crypto_op *ops[VLIB_FRAME_SIZE];
} crypto_worker_main_t __attribute__ ((aligned (CLIB_CACHE_LINE_BYTES)));
/* Tail of the crypto algorithm descriptor; earlier members are missing from
 * this excerpt. */
  /* DPDK symmetric transform kind (cipher / auth / aead) for this alg */
  enum rte_crypto_sym_xform_type type;
} crypto_alg_t __attribute__ ((aligned (8)));
/* Fragments of two structs (intermediate lines are missing from this
 * excerpt): per-device algorithm capability flags, then the tail of
 * crypto_resource_t, a device/queue-pair resource assigned to a worker. */
  /* non-zero when the device supports the given VPP alg — TODO confirm */
  u8 cipher_support[IPSEC_CRYPTO_N_ALG];
  u8 auth_support[IPSEC_INTEG_N_ALG];
  /* ops staged for the next rte_cryptodev_enqueue_burst(), see
   * crypto_enqueue_ops(); bi[] holds the matching vlib buffer indices so
   * buffers can be freed alongside ops when an enqueue falls short */
  struct rte_crypto_op *ops[VLIB_FRAME_SIZE];
  u32 bi[VLIB_FRAME_SIZE];
} crypto_resource_t __attribute__ ((aligned (CLIB_CACHE_LINE_BYTES)));
/* Fragments of the per-numa crypto_data_t (mempools + failure counters) and
 * of the global dpdk_crypto_main_t; several member lines are missing from
 * this excerpt. */
  /* numa-local pool of rte_crypto_op, used by crypto_alloc_ops() /
   * crypto_free_ops() below */
  struct rte_mempool *crypto_op;
  struct rte_mempool *session_h;	/* session header mempool */
  struct rte_mempool **session_drv;	/* per-driver session mempools — TODO confirm indexing */
  uword *session_by_sa_index;
  u64 crypto_op_get_failed;	/* bumped when bulk op allocation fails */
  u64 session_h_failed;
  u64 *session_drv_failed;
  /* --- members below belong to dpdk_crypto_main_t --- */
  crypto_worker_main_t *workers_main;	/* per-worker state — presumably a vec, confirm */
  struct rte_cryptodev_sym_session **sa_session;
  crypto_resource_t *resource;	/* device/queue-pair resources */
  crypto_alg_t *cipher_algs;	/* supported cipher algorithm table */
  crypto_alg_t *auth_algs;	/* supported auth algorithm table */
} dpdk_crypto_main_t;
/* Global DPDK crypto state singleton.
 * NOTE(review): declared in a header without 'extern', so every translation
 * unit that includes this header emits a tentative definition and correctness
 * relies on common-symbol linkage (broken under -fno-common / C standard
 * strictness).  Prefer 'extern' here plus exactly one definition in a .c
 * file — requires a coordinated change, so flagged only. */
dpdk_crypto_main_t dpdk_crypto_main;
149 static const u8 pad_data[] =
150 { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0 };
/* Assign cryptodev resources to worker threads; implementation lives in a
 * .c file not visible in this excerpt. */
void crypto_auto_placement (void);

/**
 * Build (and cache) a cryptodev symmetric session for SA @c sa_idx on
 * resource @c res — the slow path taken by crypto_get_session() below when
 * the per-worker hash lookup misses.
 * @return presumably NULL on success, a clib error otherwise — TODO confirm
 *         against the implementation.
 */
clib_error_t *create_sym_session (struct rte_cryptodev_sym_session **session,
				  u32 sa_idx, crypto_resource_t * res,
				  crypto_worker_main_t * cwm, u8 is_outbound);
/* Total allocation size of one crypto op: rte_crypto_op + rte_crypto_sym_op,
 * rounded up to a 16-byte boundary, plus our trailing dpdk_op_priv_t.
 * NOTE(review): the function-name line, opening brace and the start of the
 * op_size declaration are missing from this excerpt (presumably
 * "crypto_op_len (void) { const u32 op_size = ..."). */
static_always_inline u32
  const u32 align = 16;
    sizeof (struct rte_crypto_op) + sizeof (struct rte_crypto_sym_op);
  return ((op_size + align - 1) & ~(align - 1)) + sizeof (dpdk_op_priv_t);
/* Byte offset from the start of an rte_crypto_op to the dpdk_op_priv_t that
 * follows the op + sym-op pair, rounded up to 16 bytes (matching the
 * aligned(16) on dpdk_op_priv_t).
 * NOTE(review): the opening brace, "u32 offset;" declaration and the
 * "return offset; }" tail are missing from this excerpt. */
static_always_inline u32
crypto_op_get_priv_offset (void)
  const u32 align = 16;

  offset = sizeof (struct rte_crypto_op) + sizeof (struct rte_crypto_sym_op);
  offset = (offset + align - 1) & ~(align - 1);
/* Return a pointer to the dpdk_op_priv_t stored after the aligned sym op
 * inside the same allocation as @c op.
 * NOTE(review): the function's braces are missing from this excerpt. */
static_always_inline dpdk_op_priv_t *
crypto_op_get_priv (struct rte_crypto_op * op)
  return (dpdk_op_priv_t *) (((u8 *) op) + crypto_op_get_priv_offset ());
/* XXX this requires 64 bit builds so hash_xxx macros use u64 key */
/* Tail of the union packing (driver id, SA index) into a single 64-bit hash
 * key; member lines are missing from this excerpt — see key.drv_id and
 * key.val usage in crypto_get_session() below. */
} crypto_session_key_t;
/* Fast-path session lookup: probe the per-worker (driver, SA) -> session
 * hash; on a miss fall back to create_sym_session() to build and cache one,
 * otherwise hand back the cached session through session[0].
 * NOTE(review): this excerpt is missing several lines — the "u32 sa_idx"
 * parameter, the key.sa_index assignment, the braces and the success-path
 * "return NULL;" — confirm against the full source. */
static_always_inline clib_error_t *
crypto_get_session (struct rte_cryptodev_sym_session **session,
		    crypto_resource_t * res,
		    crypto_worker_main_t * cwm, u8 is_outbound)
  crypto_session_key_t key = { 0 };

  key.drv_id = res->drv_id;

  uword *val = hash_get (cwm->session_by_drv_id_and_sa_index, key.val);

  if (PREDICT_FALSE (!val))
    return create_sym_session (session, sa_idx, res, cwm, is_outbound);

  session[0] = (struct rte_cryptodev_sym_session *) val[0];
/* Select the worker resource index for an SA.  AES-GCM variants are AEAD
 * and are served by the cipher resource alone; otherwise the cipher and
 * auth algorithms must resolve to the same resource.
 * NOTE(review): this excerpt is missing the "u8 is_aead" declaration, the
 * bodies of both if statements, the return statements and braces — the
 * control flow shown below is partial. */
static_always_inline u16
get_resource (crypto_worker_main_t * cwm, ipsec_sa_t * sa)
  u16 cipher_res = cwm->cipher_resource_idx[sa->crypto_alg];
  u16 auth_res = cwm->auth_resource_idx[sa->integ_alg];

  /* Not allowed to setup SA with no-aead-cipher/NULL or NULL/NULL */

  is_aead = ((sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_128) |
	     (sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_192) |
	     (sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_256));

  if (sa->crypto_alg == IPSEC_CRYPTO_ALG_NONE)

  if (cipher_res == auth_res)
/* Bulk-allocate @c n crypto ops from the numa-local op mempool, counting
 * failures in the per-numa crypto_op_get_failed statistic.
 * NOTE(review): this excerpt is missing the "i32 ret;" declaration, the
 * "return ret;" tail and braces. */
static_always_inline i32
crypto_alloc_ops (u8 numa, struct rte_crypto_op ** ops, u32 n)
  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
  crypto_data_t *data = vec_elt_at_index (dcm->data, numa);

  ret = rte_mempool_get_bulk (data->crypto_op, (void **) ops, n);

  /* rte_mempool_get_bulk returns 0 on success, negative on failure;
   * "!!ret" folds any failure code into a count of 1 */
  data->crypto_op_get_failed += ! !ret;
/* Return @c n crypto ops to the numa-local op mempool.
 * NOTE(review): lines between the locals and the put_bulk call (likely an
 * early return when n == 0) are missing from this excerpt, as are the
 * function braces — confirm. */
static_always_inline void
crypto_free_ops (u8 numa, struct rte_crypto_op **ops, u32 n)
  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
  crypto_data_t *data = vec_elt_at_index (dcm->data, numa);

  rte_mempool_put_bulk (data->crypto_op, (void **) ops, n);
/* Walk every resource owned by this worker and submit its staged ops to the
 * matching cryptodev queue pair ("qp_id + outbound" selects the inbound vs
 * outbound qp), tracking in-flight ops per direction.  Ops the device could
 * not accept are returned to the mempool, their vlib buffers freed, and the
 * shortfall charged to the caller's node error counter.
 * NOTE(review): this excerpt is missing numerous lines — loop/if braces,
 * the skip for resources with no pending ops, the counter increment's count
 * argument, and the reset of res->n_ops — confirm against the full source. */
static_always_inline void
crypto_enqueue_ops (vlib_main_t * vm, crypto_worker_main_t * cwm, u8 outbound,
		    u32 node_index, u32 error, u8 numa)
  dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
  crypto_resource_t *res;

  vec_foreach (res_idx, cwm->resource_idx)
    res = vec_elt_at_index (dcm->resource, res_idx[0]);

    enq = rte_cryptodev_enqueue_burst (res->dev_id, res->qp_id + outbound,
				       res->ops, res->n_ops);
    res->inflights[outbound] += enq;

    if (PREDICT_FALSE (enq < res->n_ops))
      /* partial enqueue: release the ops and buffers the device refused */
      crypto_free_ops (numa, &res->ops[enq], res->n_ops - enq);
      vlib_buffer_free (vm, &res->bi[enq], res->n_ops - enq);

      vlib_node_increment_counter (vm, node_index, error,
/* Initialize a GCM initial counter block for an SA: the salt / sequence
 * fields are presumably filled in lines missing from this excerpt; the
 * block counter starts at 1 in network byte order per the GCM-for-ESP
 * construction. */
static_always_inline void
crypto_set_icb (dpdk_gcm_cnt_blk * icb, u32 salt, u32 seq, u32 seq_hi)
  icb->cnt = clib_host_to_net_u32 (1);
/* Marks parameters that are intentionally unused on one side of the
 * DPDK_NO_AEAD conditional below.
 * NOTE(review): "__unused" lives in the implementation-reserved namespace
 * (leading double underscore) and collides with the macro of the same name
 * in BSD/glibc sys/cdefs.h; a project-prefixed spelling would be safer, but
 * renaming requires touching every use site, so flagged only. */
#define __unused __attribute__((unused))
/* Populate the rte_crypto_sym_op that immediately follows @c op for either
 * an AEAD (GCM) operation or a chained cipher+auth operation on mbuf mb0.
 * Physical addresses for IV/AAD are computed as offsets from op->phys_addr;
 * the digest physical address is derived from its offset inside the mbuf.
 * NOTE(review): this excerpt is missing the opening brace, the
 * "#if DPDK_NO_AEAD" line that opens the conditional closed at the #else /
 * #endif below (presumably the pre-17.08 DPDK API, where IV and AAD live in
 * the cipher/auth sub-structs), the is_aead branch structure, a likely
 * guard around the digest_paddr computation, and several closing braces —
 * confirm against the full source. */
static_always_inline void
crypto_op_setup (u8 is_aead, struct rte_mbuf *mb0,
		 struct rte_crypto_op *op, void *session,
		 u32 cipher_off, u32 cipher_len,
		 u8 * icb __unused, u32 iv_size __unused,
		 u32 auth_off, u32 auth_len,
		 u8 * aad __unused, u32 aad_size __unused,
		 u8 * digest, u64 digest_paddr, u32 digest_size __unused)
  struct rte_crypto_sym_op *sym_op;

  /* sym op is laid out directly after the generic op header */
  sym_op = (struct rte_crypto_sym_op *) (op + 1);

  sym_op->session = session;

  /* digest's physical address = mbuf buffer base + digest's byte offset */
  digest_paddr = mb0->buf_physaddr + ((u8 *) digest) - ((u8 *) mb0);

  sym_op->cipher.data.offset = cipher_off;
  sym_op->cipher.data.length = cipher_len;

  /* IV sits inside the op allocation; translate its virtual offset into a
   * physical address relative to op->phys_addr */
  sym_op->cipher.iv.data = icb;
  sym_op->cipher.iv.phys_addr =
    op->phys_addr + (uintptr_t) icb - (uintptr_t) op;
  sym_op->cipher.iv.length = iv_size;

  sym_op->auth.aad.data = aad;
  sym_op->auth.aad.phys_addr =
    op->phys_addr + (uintptr_t) aad - (uintptr_t) op;
  sym_op->auth.aad.length = aad_size;

  sym_op->auth.data.offset = auth_off;
  sym_op->auth.data.length = auth_len;

  sym_op->auth.digest.data = digest;
  sym_op->auth.digest.phys_addr = digest_paddr;
  sym_op->auth.digest.length = digest_size;
#else /* ! DPDK_NO_AEAD */
  /* Newer DPDK API: dedicated aead sub-struct for GCM-style operations. */
  sym_op->aead.data.offset = cipher_off;
  sym_op->aead.data.length = cipher_len;

  sym_op->aead.aad.data = aad;
  sym_op->aead.aad.phys_addr =
    op->phys_addr + (uintptr_t) aad - (uintptr_t) op;

  sym_op->aead.digest.data = digest;
  sym_op->aead.digest.phys_addr = digest_paddr;

  /* Non-AEAD path: separate cipher and auth ranges plus a digest. */
  sym_op->cipher.data.offset = cipher_off;
  sym_op->cipher.data.length = cipher_len;

  sym_op->auth.data.offset = auth_off;
  sym_op->auth.data.length = auth_len;

  sym_op->auth.digest.data = digest;
  sym_op->auth.digest.phys_addr = digest_paddr;
#endif /* DPDK_NO_AEAD */
388 #endif /* __DPDK_IPSEC_H__ */
391 * fd.io coding-style-patch-verification: ON
394 * eval: (c-set-style "gnu")