/*
 * Copyright (c) 2016 Intel and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
15 #ifndef __DPDK_ESP_H__
16 #define __DPDK_ESP_H__
18 #include <dpdk/ipsec/ipsec.h>
19 #include <vnet/ipsec/ipsec.h>
20 #include <vnet/ipsec/esp.h>
24 enum rte_crypto_cipher_algorithm algo;
26 enum rte_crypto_aead_algorithm aead_algo;
30 } dpdk_esp_crypto_alg_t;
34 enum rte_crypto_auth_algorithm algo;
36 } dpdk_esp_integ_alg_t;
40 dpdk_esp_crypto_alg_t *esp_crypto_algs;
41 dpdk_esp_integ_alg_t *esp_integ_algs;
44 dpdk_esp_main_t dpdk_esp_main;
46 static_always_inline void
49 dpdk_esp_main_t *em = &dpdk_esp_main;
50 dpdk_esp_integ_alg_t *i;
51 dpdk_esp_crypto_alg_t *c;
53 vec_validate (em->esp_crypto_algs, IPSEC_CRYPTO_N_ALG - 1);
55 c = &em->esp_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_128];
56 c->algo = RTE_CRYPTO_CIPHER_AES_CBC;
60 c = &em->esp_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_192];
61 c->algo = RTE_CRYPTO_CIPHER_AES_CBC;
65 c = &em->esp_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_256];
66 c->algo = RTE_CRYPTO_CIPHER_AES_CBC;
70 c = &em->esp_crypto_algs[IPSEC_CRYPTO_ALG_AES_GCM_128];
72 c->algo = RTE_CRYPTO_CIPHER_AES_GCM;
74 c->aead_algo = RTE_CRYPTO_AEAD_AES_GCM;
79 vec_validate (em->esp_integ_algs, IPSEC_INTEG_N_ALG - 1);
81 i = &em->esp_integ_algs[IPSEC_INTEG_ALG_SHA1_96];
82 i->algo = RTE_CRYPTO_AUTH_SHA1_HMAC;
85 i = &em->esp_integ_algs[IPSEC_INTEG_ALG_SHA_256_96];
86 i->algo = RTE_CRYPTO_AUTH_SHA256_HMAC;
89 i = &em->esp_integ_algs[IPSEC_INTEG_ALG_SHA_256_128];
90 i->algo = RTE_CRYPTO_AUTH_SHA256_HMAC;
93 i = &em->esp_integ_algs[IPSEC_INTEG_ALG_SHA_384_192];
94 i->algo = RTE_CRYPTO_AUTH_SHA384_HMAC;
97 i = &em->esp_integ_algs[IPSEC_INTEG_ALG_SHA_512_256];
98 i->algo = RTE_CRYPTO_AUTH_SHA512_HMAC;
101 i = &em->esp_integ_algs[IPSEC_INTEG_ALG_AES_GCM_128];
102 i->algo = RTE_CRYPTO_AUTH_AES_GCM;
107 static_always_inline int
108 translate_crypto_algo (ipsec_crypto_alg_t crypto_algo,
109 struct rte_crypto_sym_xform *xform, u8 use_esn)
113 sizeof (struct rte_crypto_op) + sizeof (struct rte_crypto_sym_op) +
114 offsetof (dpdk_cop_priv_t, cb);
117 xform->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
121 case IPSEC_CRYPTO_ALG_NONE:
123 xform->cipher.iv.offset = iv_off;
124 xform->cipher.iv.length = 0;
126 xform->cipher.algo = RTE_CRYPTO_CIPHER_NULL;
128 case IPSEC_CRYPTO_ALG_AES_CBC_128:
129 case IPSEC_CRYPTO_ALG_AES_CBC_192:
130 case IPSEC_CRYPTO_ALG_AES_CBC_256:
132 xform->cipher.iv.offset = iv_off;
133 xform->cipher.iv.length = 16;
135 xform->cipher.algo = RTE_CRYPTO_CIPHER_AES_CBC;
137 case IPSEC_CRYPTO_ALG_AES_GCM_128:
139 xform->cipher.algo = RTE_CRYPTO_CIPHER_AES_GCM;
141 xform->type = RTE_CRYPTO_SYM_XFORM_AEAD;
142 xform->aead.algo = RTE_CRYPTO_AEAD_AES_GCM;
143 xform->aead.iv.offset = iv_off;
144 xform->aead.iv.length = 12; /* GCM IV, not ESP IV */
145 xform->aead.digest_length = 16;
146 xform->aead.aad_length = use_esn ? 12 : 8;
156 static_always_inline int
157 translate_integ_algo (ipsec_integ_alg_t integ_alg,
158 struct rte_crypto_sym_xform *auth_xform, u8 use_esn)
160 auth_xform->type = RTE_CRYPTO_SYM_XFORM_AUTH;
164 case IPSEC_INTEG_ALG_NONE:
165 auth_xform->auth.algo = RTE_CRYPTO_AUTH_NULL;
166 auth_xform->auth.digest_length = 0;
168 case IPSEC_INTEG_ALG_SHA1_96:
169 auth_xform->auth.algo = RTE_CRYPTO_AUTH_SHA1_HMAC;
170 auth_xform->auth.digest_length = 12;
172 case IPSEC_INTEG_ALG_SHA_256_96:
173 auth_xform->auth.algo = RTE_CRYPTO_AUTH_SHA256_HMAC;
174 auth_xform->auth.digest_length = 12;
176 case IPSEC_INTEG_ALG_SHA_256_128:
177 auth_xform->auth.algo = RTE_CRYPTO_AUTH_SHA256_HMAC;
178 auth_xform->auth.digest_length = 16;
180 case IPSEC_INTEG_ALG_SHA_384_192:
181 auth_xform->auth.algo = RTE_CRYPTO_AUTH_SHA384_HMAC;
182 auth_xform->auth.digest_length = 24;
184 case IPSEC_INTEG_ALG_SHA_512_256:
185 auth_xform->auth.algo = RTE_CRYPTO_AUTH_SHA512_HMAC;
186 auth_xform->auth.digest_length = 32;
189 case IPSEC_INTEG_ALG_AES_GCM_128:
190 auth_xform->auth.algo = RTE_CRYPTO_AUTH_AES_GCM;
191 auth_xform->auth.digest_length = 16;
192 auth_xform->auth.add_auth_data_length = use_esn ? 12 : 8;
202 static_always_inline i32
203 create_sym_sess (ipsec_sa_t * sa, crypto_sa_session_t * sa_sess,
206 u32 thread_index = vlib_get_thread_index ();
207 dpdk_crypto_main_t *dcm = &dpdk_crypto_main;
208 crypto_worker_main_t *cwm = &dcm->workers_main[thread_index];
209 struct rte_crypto_sym_xform cipher_xform = { 0 };
210 struct rte_crypto_sym_xform auth_xform = { 0 };
211 struct rte_crypto_sym_xform *xfs;
212 uword key = 0, *data;
213 crypto_worker_qp_key_t *p_key = (crypto_worker_qp_key_t *) & key;
215 i32 socket_id = rte_socket_id ();
219 if (sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_128)
221 sa->crypto_key_len -= 4;
222 clib_memcpy (&sa->salt, &sa->crypto_key[sa->crypto_key_len], 4);
226 u32 seed = (u32) clib_cpu_time_now ();
227 sa->salt = random_u32 (&seed);
230 if (translate_crypto_algo (sa->crypto_alg, &cipher_xform, sa->use_esn) < 0)
232 p_key->cipher_algo = cipher_xform.cipher.algo;
234 if (translate_integ_algo (sa->integ_alg, &auth_xform, sa->use_esn) < 0)
236 p_key->auth_algo = auth_xform.auth.algo;
239 if (sa->crypto_alg == IPSEC_CRYPTO_ALG_AES_GCM_128)
241 cipher_xform.aead.key.data = sa->crypto_key;
242 cipher_xform.aead.key.length = sa->crypto_key_len;
245 cipher_xform.cipher.op =
246 (enum rte_crypto_cipher_operation) RTE_CRYPTO_AEAD_OP_ENCRYPT;
248 cipher_xform.cipher.op =
249 (enum rte_crypto_cipher_operation) RTE_CRYPTO_AEAD_OP_DECRYPT;
250 cipher_xform.next = NULL;
254 else /* Cipher + Auth */
257 cipher_xform.cipher.key.data = sa->crypto_key;
258 cipher_xform.cipher.key.length = sa->crypto_key_len;
260 auth_xform.auth.key.data = sa->integ_key;
261 auth_xform.auth.key.length = sa->integ_key_len;
265 cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
266 auth_xform.auth.op = RTE_CRYPTO_AUTH_OP_GENERATE;
267 cipher_xform.next = &auth_xform;
272 cipher_xform.cipher.op = RTE_CRYPTO_CIPHER_OP_DECRYPT;
273 auth_xform.auth.op = RTE_CRYPTO_AUTH_OP_VERIFY;
274 auth_xform.next = &cipher_xform;
280 p_key->is_outbound = is_outbound;
282 data = hash_get (cwm->algo_qp_map, key);
288 rte_cryptodev_sym_session_create (cwm->qp_data[*data].dev_id, xfs);
293 rte_cryptodev_sym_session_create (dcm->sess_h_pools[socket_id]);
298 rte_cryptodev_sym_session_init (cwm->qp_data[*data].dev_id, sa_sess->sess,
299 xfs, dcm->sess_pools[socket_id]);
304 sa_sess->qp_index = (u8) * data;
309 static_always_inline void
310 crypto_set_icb (dpdk_gcm_cnt_blk * icb, u32 salt, u32 seq, u32 seq_hi)
316 icb->cnt = clib_host_to_net_u32 (1);
320 #define __unused __attribute__((unused))
321 static_always_inline void
322 crypto_op_setup (u8 is_aead, struct rte_mbuf *mb0,
323 struct rte_crypto_op *cop, void *session,
324 u32 cipher_off, u32 cipher_len,
325 u8 * icb __unused, u32 iv_size __unused,
326 u32 auth_off, u32 auth_len,
327 u8 * aad __unused, u32 aad_size __unused,
328 u8 * digest, u64 digest_paddr, u32 digest_size __unused)
330 struct rte_crypto_sym_op *sym_cop;
332 sym_cop = (struct rte_crypto_sym_op *) (cop + 1);
334 sym_cop->m_src = mb0;
335 rte_crypto_op_attach_sym_session (cop, session);
339 rte_pktmbuf_mtophys_offset (mb0, (uintptr_t) digest - (uintptr_t) mb0);
342 sym_cop->cipher.data.offset = cipher_off;
343 sym_cop->cipher.data.length = cipher_len;
345 sym_cop->cipher.iv.data = icb;
346 sym_cop->cipher.iv.phys_addr =
347 cop->phys_addr + (uintptr_t) icb - (uintptr_t) cop;
348 sym_cop->cipher.iv.length = iv_size;
352 sym_cop->auth.aad.data = aad;
353 sym_cop->auth.aad.phys_addr =
354 cop->phys_addr + (uintptr_t) aad - (uintptr_t) cop;
355 sym_cop->auth.aad.length = aad_size;
359 sym_cop->auth.data.offset = auth_off;
360 sym_cop->auth.data.length = auth_len;
363 sym_cop->auth.digest.data = digest;
364 sym_cop->auth.digest.phys_addr = digest_paddr;
365 sym_cop->auth.digest.length = digest_size;
366 #else /* ! DPDK_NO_AEAD */
369 sym_cop->aead.data.offset = cipher_off;
370 sym_cop->aead.data.length = cipher_len;
372 sym_cop->aead.aad.data = aad;
373 sym_cop->aead.aad.phys_addr =
374 cop->phys_addr + (uintptr_t) aad - (uintptr_t) cop;
376 sym_cop->aead.digest.data = digest;
377 sym_cop->aead.digest.phys_addr = digest_paddr;
381 sym_cop->cipher.data.offset = cipher_off;
382 sym_cop->cipher.data.length = cipher_len;
384 sym_cop->auth.data.offset = auth_off;
385 sym_cop->auth.data.length = auth_len;
387 sym_cop->auth.digest.data = digest;
388 sym_cop->auth.digest.phys_addr = digest_paddr;
390 #endif /* DPDK_NO_AEAD */
395 #endif /* __DPDK_ESP_H__ */
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */