/*
 *------------------------------------------------------------------
 * Copyright (c) 2019 - 2021 Intel and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *------------------------------------------------------------------
 */
#ifndef included_cryptodev_h
#define included_cryptodev_h

#include <vnet/crypto/crypto.h>
#undef always_inline
#include <rte_cryptodev.h>

#define CRYPTODEV_NB_CRYPTO_OPS    1024
#define CRYPTODEV_CACHE_QUEUE_SIZE VNET_CRYPTO_FRAME_POOL_SIZE
#define CRYPTODEV_CACHE_QUEUE_MASK (VNET_CRYPTO_FRAME_POOL_SIZE - 1)
#define CRYPTODEV_MAX_INFLIGHT     (CRYPTODEV_NB_CRYPTO_OPS - 1)
#define CRYPTODEV_AAD_MASK         (CRYPTODEV_NB_CRYPTO_OPS - 1)
#define CRYPTODE_ENQ_MAX           64
#define CRYPTODE_DEQ_MAX           64
#define CRYPTODEV_DEQ_CACHE_SZ     32
#define CRYPTODEV_NB_SESSION       4096
#define CRYPTODEV_MAX_IV_SIZE      16
#define CRYPTODEV_MAX_AAD_SIZE     16
#define CRYPTODEV_MAX_N_SGL        8 /**< maximum number of segments */

#define CRYPTODEV_IV_OFFSET  (offsetof (cryptodev_op_t, iv))
#define CRYPTODEV_AAD_OFFSET (offsetof (cryptodev_op_t, aad))

/* VNET_CRYPTO_ALGO, TYPE, DPDK_CRYPTO_ALGO, IV_LEN, TAG_LEN, AAD_LEN, KEY_LEN
 */
#define foreach_vnet_aead_crypto_conversion                                   \
  _ (AES_128_GCM, AEAD, AES_GCM, 12, 16, 8, 16)                               \
  _ (AES_128_GCM, AEAD, AES_GCM, 12, 16, 12, 16)                              \
  _ (AES_192_GCM, AEAD, AES_GCM, 12, 16, 8, 24)                               \
  _ (AES_192_GCM, AEAD, AES_GCM, 12, 16, 12, 24)                              \
  _ (AES_256_GCM, AEAD, AES_GCM, 12, 16, 8, 32)                               \
  _ (AES_256_GCM, AEAD, AES_GCM, 12, 16, 12, 32)                              \
  _ (CHACHA20_POLY1305, AEAD, CHACHA20_POLY1305, 12, 16, 0, 32)               \
  _ (CHACHA20_POLY1305, AEAD, CHACHA20_POLY1305, 12, 16, 8, 32)               \
  _ (CHACHA20_POLY1305, AEAD, CHACHA20_POLY1305, 12, 16, 12, 32)
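
/* The list above is an X-macro: a consumer defines _ () and expands the list
 * once per supported AEAD combination.  A minimal illustrative sketch only;
 * the handler name below is hypothetical, not part of this header:
 *
 *   #define _(a, t, g, iv, tag, aad, key)                                    \
 *     register_aead_capability (VNET_CRYPTO_ALG_##a, RTE_CRYPTO_AEAD_##g,    \
 *                               iv, tag, aad, key);
 *   foreach_vnet_aead_crypto_conversion
 *   #undef _
 */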

/**
 * crypto (alg, cryptodev_alg, key_size), hash (alg, digest_size)
 */
#define foreach_cryptodev_link_async_alg                                      \
  _ (AES_128_CBC, AES_CBC, 16, MD5, 12)                                       \
  _ (AES_192_CBC, AES_CBC, 24, MD5, 12)                                       \
  _ (AES_256_CBC, AES_CBC, 32, MD5, 12)                                       \
  _ (AES_128_CBC, AES_CBC, 16, SHA1, 12)                                      \
  _ (AES_192_CBC, AES_CBC, 24, SHA1, 12)                                      \
  _ (AES_256_CBC, AES_CBC, 32, SHA1, 12)                                      \
  _ (AES_128_CBC, AES_CBC, 16, SHA224, 14)                                    \
  _ (AES_192_CBC, AES_CBC, 24, SHA224, 14)                                    \
  _ (AES_256_CBC, AES_CBC, 32, SHA224, 14)                                    \
  _ (AES_128_CBC, AES_CBC, 16, SHA256, 16)                                    \
  _ (AES_192_CBC, AES_CBC, 24, SHA256, 16)                                    \
  _ (AES_256_CBC, AES_CBC, 32, SHA256, 16)                                    \
  _ (AES_128_CBC, AES_CBC, 16, SHA384, 24)                                    \
  _ (AES_192_CBC, AES_CBC, 24, SHA384, 24)                                    \
  _ (AES_256_CBC, AES_CBC, 32, SHA384, 24)                                    \
  _ (AES_128_CBC, AES_CBC, 16, SHA512, 32)                                    \
  _ (AES_192_CBC, AES_CBC, 24, SHA512, 32)                                    \
  _ (AES_256_CBC, AES_CBC, 32, SHA512, 32)                                    \
  _ (AES_128_CTR, AES_CTR, 16, SHA1, 12)                                      \
  _ (AES_192_CTR, AES_CTR, 24, SHA1, 12)                                      \
  _ (AES_256_CTR, AES_CTR, 32, SHA1, 12)
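
/* Each linked entry pairs a cipher with an authentication algorithm.  At
 * session creation time such a pair is expressed as two chained
 * rte_crypto_sym_xform structures; a minimal sketch of the chaining (field
 * values omitted for brevity):
 *
 *   struct rte_crypto_sym_xform auth_xform = {
 *     .type = RTE_CRYPTO_SYM_XFORM_AUTH,
 *   };
 *   struct rte_crypto_sym_xform cipher_xform = {
 *     .type = RTE_CRYPTO_SYM_XFORM_CIPHER,
 *     .next = &auth_xform,
 *   };
 */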

typedef enum
{
  CRYPTODEV_OP_TYPE_ENCRYPT = 0,
  CRYPTODEV_OP_TYPE_DECRYPT,
  CRYPTODEV_N_OP_TYPES,
} cryptodev_op_type_t;

#if RTE_VERSION >= RTE_VERSION_NUM(22, 11, 0, 0)
typedef void cryptodev_session_t;
#else
typedef struct rte_cryptodev_sym_session cryptodev_session_t;
#endif

/* Cryptodev session data: one session per direction (encrypt/decrypt) per
 * NUMA node */
typedef struct
{
  cryptodev_session_t ***keys;
} cryptodev_key_t;
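
/* A minimal sketch of the intended indexing, assuming numa is the NUMA node
 * of the calling thread and key_index identifies the key (illustrative only):
 *
 *   cryptodev_key_t *ckey = vec_elt_at_index (cryptodev_main.keys, key_index);
 *   cryptodev_session_t *sess = ckey->keys[numa][CRYPTODEV_OP_TYPE_ENCRYPT];
 */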

/* Mirror of the DPDK rte_cryptodev_sym_capability structure, with the
 * supported size ranges expanded into VPP vectors */
typedef struct
{
  enum rte_crypto_sym_xform_type xform_type;
  union
  {
    struct
    {
      enum rte_crypto_auth_algorithm algo; /* auth algo */
      u32 *digest_sizes;                   /* vector of auth digest sizes */
    } auth;
    struct
    {
      enum rte_crypto_cipher_algorithm algo; /* cipher algo */
      u32 *key_sizes;                        /* vector of cipher key sizes */
    } cipher;
    struct
    {
      enum rte_crypto_aead_algorithm algo; /* aead algo */
      u32 *key_sizes;                      /* vector of aead key sizes */
      u32 *aad_sizes;                      /* vector of aad sizes */
      u32 *digest_sizes;                   /* vector of aead digest sizes */
    } aead;
  };
} cryptodev_capability_t;

/* Cryptodev instance data */
typedef struct
{
  u32 dev_id;
  u32 q_id;
  char *desc;
} cryptodev_inst_t;

typedef struct
{
  struct rte_mempool *sess_pool;
#if RTE_VERSION < RTE_VERSION_NUM(22, 11, 0, 0)
  struct rte_mempool *sess_priv_pool;
#endif
} cryptodev_session_pool_t;

typedef struct
{
  cryptodev_session_pool_t *sess_pools;
} cryptodev_numa_data_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  struct rte_crypto_op op;
  struct rte_crypto_sym_op sop;
  u8 iv[CRYPTODEV_MAX_IV_SIZE];
  u8 aad[CRYPTODEV_MAX_AAD_SIZE];
  vnet_crypto_async_frame_t *frame;
  u32 n_elts;
} cryptodev_op_t;
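
/* The iv and aad buffers live inside the op itself, so the data path can hand
 * DPDK a constant offset from the start of each rte_crypto_op (see
 * CRYPTODEV_IV_OFFSET / CRYPTODEV_AAD_OFFSET above).  A minimal sketch,
 * assuming cop points to a populated cryptodev_op_t and fe to the matching
 * frame element:
 *
 *   u8 *iv = rte_crypto_op_ctod_offset (&cop->op, u8 *, CRYPTODEV_IV_OFFSET);
 *   clib_memcpy_fast (iv, fe->iv, 12);
 */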

typedef struct
{
  vnet_crypto_async_frame_t *f;

  u8 enqueued;
  u8 dequeued;
  u8 deq_state;
  u8 frame_inflight;

  u8 op_type;
  u8 aad_len;
  u8 n_elts;
  u8 reserved;
} cryptodev_async_ring_elt;

typedef struct
{
  cryptodev_async_ring_elt frames[VNET_CRYPTO_FRAME_POOL_SIZE];
  uint16_t head;
  uint16_t tail;
  uint16_t enq; /* index of the frame currently being enqueued */
  uint16_t deq; /* index of the frame currently being dequeued */
} cryptodev_async_frame_sw_ring;
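
/* head, tail, enq and deq index into frames[]; accesses are masked with
 * CRYPTODEV_CACHE_QUEUE_MASK, which assumes VNET_CRYPTO_FRAME_POOL_SIZE is a
 * power of two.  Illustrative sketch of advancing the ring, not the exact
 * data-path code:
 *
 *   cryptodev_async_ring_elt *elt =
 *     &ring->frames[ring->head & CRYPTODEV_CACHE_QUEUE_MASK];
 *   ring->head = (ring->head + 1) & CRYPTODEV_CACHE_QUEUE_MASK;
 */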
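/* Per worker-thread engine state.  The anonymous union holds state for one of
 * the two data paths: the rte_crypto_op based path (cops, cop_pool, ring) or
 * the raw data-path API (ctx, cached_frame, aad buffer, reset session);
 * cryptodev_main.is_raw_api below indicates which path the engine registered. */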
typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  vlib_buffer_t *b[VNET_CRYPTO_FRAME_SIZE];
  union
  {
    struct
    {
      cryptodev_op_t **cops;
      struct rte_mempool *cop_pool;
      struct rte_ring *ring;
    };
    struct
    {
      struct rte_crypto_raw_dp_ctx *ctx;
      struct rte_ring *cached_frame;
      u16 aad_index;
      u8 *aad_buf;
      u64 aad_phy_addr;
      cryptodev_session_t *reset_sess;
    };
  };

  cryptodev_async_frame_sw_ring frame_ring;
  u16 cryptodev_id;
  u16 cryptodev_q;
  u16 frames_on_ring;
  u16 enqueued_not_dequeueq;
  u16 deqeued_not_returned;
  u16 pending_to_qat;
  u16 inflight;
} cryptodev_engine_thread_t;

typedef struct
{
  cryptodev_numa_data_t *per_numa_data;
  cryptodev_key_t *keys;
  cryptodev_engine_thread_t *per_thread_data;
  enum rte_iova_mode iova_mode;
  cryptodev_inst_t *cryptodev_inst;
  clib_bitmap_t *active_cdev_inst_mask;
  clib_spinlock_t tlock;
  cryptodev_capability_t *supported_caps;
  u32 sess_sz;
  u32 drivers_cnt;
  u8 is_raw_api;
#if RTE_VERSION >= RTE_VERSION_NUM(22, 11, 0, 0)
  u8 driver_id;
#endif
} cryptodev_main_t;

extern cryptodev_main_t cryptodev_main;

static_always_inline void
cryptodev_mark_frame_err_status (vnet_crypto_async_frame_t *f,
                                 vnet_crypto_op_status_t s)
{
  u32 n_elts = f->n_elts, i;

  for (i = 0; i < n_elts; i++)
    f->elts[i].status = s;
  f->state = VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED;
}
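
/* Typical use is to fail an entire frame when it cannot be enqueued to the
 * device; a minimal sketch (the status code is chosen for illustration):
 *
 *   cryptodev_mark_frame_err_status (frame,
 *                                    VNET_CRYPTO_OP_STATUS_FAIL_ENG_ERR);
 */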

int cryptodev_session_create (vlib_main_t *vm, vnet_crypto_key_index_t idx,
                              u32 aad_len);

void cryptodev_sess_handler (vlib_main_t *vm, vnet_crypto_key_op_t kop,
                             vnet_crypto_key_index_t idx, u32 aad_len);

int cryptodev_check_cap_support (struct rte_cryptodev_sym_capability_idx *idx,
                                 u32 key_size, u32 digest_size, u32 aad_size);
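
/* A minimal sketch of a capability query for AES-128-GCM, assuming a non-zero
 * return value means the capability is supported (sizes are examples only):
 *
 *   struct rte_cryptodev_sym_capability_idx cap_idx = {
 *     .type = RTE_CRYPTO_SYM_XFORM_AEAD,
 *     .algo.aead = RTE_CRYPTO_AEAD_AES_GCM,
 *   };
 *   if (!cryptodev_check_cap_support (&cap_idx, 16, 16, 12))
 *     return clib_error_return (0, "AES-128-GCM not supported");
 */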

clib_error_t *cryptodev_register_cop_hdl (vlib_main_t *vm, u32 eidx);

clib_error_t *__clib_weak cryptodev_register_raw_hdl (vlib_main_t *vm,
                                                      u32 eidx);

clib_error_t *__clib_weak dpdk_cryptodev_init (vlib_main_t *vm);

#endif