ipsec: add support for chained buffers
[vpp.git] src/vnet/crypto/crypto.h
/*
 * Copyright (c) 2019 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef included_vnet_crypto_crypto_h
#define included_vnet_crypto_crypto_h

#define VNET_CRYPTO_RING_SIZE 512

#include <vlib/vlib.h>

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_cipher_alg \
  _(DES_CBC,     "des-cbc", 7) \
  _(3DES_CBC,    "3des-cbc", 24) \
  _(AES_128_CBC, "aes-128-cbc", 16) \
  _(AES_192_CBC, "aes-192-cbc", 24) \
  _(AES_256_CBC, "aes-256-cbc", 32) \
  _(AES_128_CTR, "aes-128-ctr", 16) \
  _(AES_192_CTR, "aes-192-ctr", 24) \
  _(AES_256_CTR, "aes-256-ctr", 32)

/* CRYPTO_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES */
#define foreach_crypto_aead_alg \
  _(AES_128_GCM, "aes-128-gcm", 16) \
  _(AES_192_GCM, "aes-192-gcm", 24) \
  _(AES_256_GCM, "aes-256-gcm", 32)

#define foreach_crypto_hmac_alg \
  _(MD5, "md5") \
  _(SHA1, "sha-1") \
  _(SHA224, "sha-224")  \
  _(SHA256, "sha-256")  \
  _(SHA384, "sha-384")  \
  _(SHA512, "sha-512")


#define foreach_crypto_op_type \
  _(ENCRYPT, "encrypt") \
  _(DECRYPT, "decrypt") \
  _(AEAD_ENCRYPT, "aead-encrypt") \
  _(AEAD_DECRYPT, "aead-decrypt") \
  _(HMAC, "hmac")

typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_TYPE_##n,
  foreach_crypto_op_type
#undef _
    VNET_CRYPTO_OP_N_TYPES,
} vnet_crypto_op_type_t;

#define foreach_crypto_op_status \
  _(PENDING, "pending") \
  _(COMPLETED, "completed") \
  _(FAIL_NO_HANDLER, "no-handler") \
  _(FAIL_BAD_HMAC, "bad-hmac")

typedef enum
{
  VNET_CRYPTO_KEY_OP_ADD,
  VNET_CRYPTO_KEY_OP_DEL,
  VNET_CRYPTO_KEY_OP_MODIFY,
} vnet_crypto_key_op_t;

typedef enum
{
#define _(n, s) VNET_CRYPTO_OP_STATUS_##n,
  foreach_crypto_op_status
#undef _
    VNET_CRYPTO_OP_N_STATUS,
} vnet_crypto_op_status_t;

/* *INDENT-OFF* */
typedef enum
{
  VNET_CRYPTO_ALG_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_ALG_##n,
  foreach_crypto_cipher_alg
  foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_ALG_HMAC_##n,
  foreach_crypto_hmac_alg
#undef _
  VNET_CRYPTO_N_ALGS,
} vnet_crypto_alg_t;

typedef struct
{
  u8 *data;
  vnet_crypto_alg_t alg:8;
} vnet_crypto_key_t;

typedef enum
{
  VNET_CRYPTO_OP_NONE = 0,
#define _(n, s, l) VNET_CRYPTO_OP_##n##_ENC, VNET_CRYPTO_OP_##n##_DEC,
  foreach_crypto_cipher_alg
  foreach_crypto_aead_alg
#undef _
#define _(n, s) VNET_CRYPTO_OP_##n##_HMAC,
  foreach_crypto_hmac_alg
#undef _
    VNET_CRYPTO_N_OP_IDS,
} vnet_crypto_op_id_t;
/* *INDENT-ON* */

typedef enum
{
  CRYPTO_OP_SIMPLE,
  CRYPTO_OP_CHAINED,
  CRYPTO_OP_BOTH,
} crypto_op_class_type_t;

typedef struct
{
  char *name;
  vnet_crypto_op_id_t op_by_type[VNET_CRYPTO_OP_N_TYPES];
} vnet_crypto_alg_data_t;

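/* One segment of a chained (multi-buffer) operation: a source pointer,
 * a destination pointer and the segment length. A chained op references
 * an array of these chunks instead of a single contiguous src/dst pair. */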
typedef struct
{
  u8 *src;
  u8 *dst;
  u32 len;
} vnet_crypto_op_chunk_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  uword user_data;
  vnet_crypto_op_id_t op:16;
  vnet_crypto_op_status_t status:8;
  u8 flags;
#define VNET_CRYPTO_OP_FLAG_INIT_IV (1 << 0)
#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK (1 << 1)
#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS (1 << 2)

  union
  {
    u8 digest_len;
    u8 tag_len;
  };
  u16 aad_len;

  union
  {
    struct
    {
      u8 *src;
      u8 *dst;
    };

    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
    u16 n_chunks;
  };

  union
  {
    u32 len;
    /* valid if VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS is set */
    u32 chunk_index;
  };

  u32 key_index;
  u8 *iv;
  u8 *aad;

  union
  {
    u8 *tag;
    u8 *digest;
  };
} vnet_crypto_op_t;

STATIC_ASSERT_SIZEOF (vnet_crypto_op_t, CLIB_CACHE_LINE_BYTES);
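
/*
 * Usage sketch (illustrative only, not part of the API): a simple op
 * carries a single src/dst/len triplet, while a chained op sets
 * VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS and describes its data as
 * n_chunks entries starting at chunk_index in a caller-provided
 * vnet_crypto_op_chunk_t vector. Here seg and seg_len are hypothetical
 * placeholders for one buffer segment's data pointer and length:
 *
 *   vnet_crypto_op_chunk_t *chunks = 0;
 *   vnet_crypto_op_chunk_t ch = { .src = seg, .dst = seg, .len = seg_len };
 *   op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
 *   op->chunk_index = vec_len (chunks);   first chunk belonging to this op
 *   vec_add1 (chunks, ch);                one entry per buffer segment
 *   op->n_chunks = 1;
 */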

typedef struct
{
  vnet_crypto_op_type_t type;
  vnet_crypto_alg_t alg;
  u32 active_engine_index_simple;
  u32 active_engine_index_chained;
} vnet_crypto_op_data_t;

typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  clib_bitmap_t *act_queues;
} vnet_crypto_thread_t;

typedef u32 vnet_crypto_key_index_t;

typedef u32 (vnet_crypto_chained_ops_handler_t) (vlib_main_t * vm,
						 vnet_crypto_op_t * ops[],
						 vnet_crypto_op_chunk_t *
						 chunks, u32 n_ops);

typedef u32 (vnet_crypto_ops_handler_t) (vlib_main_t * vm,
					 vnet_crypto_op_t * ops[], u32 n_ops);

typedef void (vnet_crypto_key_handler_t) (vlib_main_t * vm,
					  vnet_crypto_key_op_t kop,
					  vnet_crypto_key_index_t idx);

u32 vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
				 char *desc);

void vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
					vnet_crypto_op_id_t opt,
					vnet_crypto_ops_handler_t * oph);

void vnet_crypto_register_chained_ops_handler (vlib_main_t * vm,
						u32 engine_index,
						vnet_crypto_op_id_t opt,
						vnet_crypto_chained_ops_handler_t
						* oph);
void vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
					 vnet_crypto_op_id_t opt,
					 vnet_crypto_ops_handler_t * fn,
					 vnet_crypto_chained_ops_handler_t *
					 cfn);

void vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
					vnet_crypto_key_handler_t * keyh);
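
/*
 * Registration sketch (illustrative): a crypto engine typically registers
 * itself and then installs per-op-id handlers for simple and chained
 * operations. my_aes_128_gcm_enc and my_aes_128_gcm_enc_chained are
 * hypothetical handler names, not existing symbols:
 *
 *   u32 eidx = vnet_crypto_register_engine (vm, "my-engine", 100,
 *                                           "example engine");
 *   vnet_crypto_register_ops_handlers (vm, eidx,
 *                                      VNET_CRYPTO_OP_AES_128_GCM_ENC,
 *                                      my_aes_128_gcm_enc,
 *                                      my_aes_128_gcm_enc_chained);
 */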

typedef struct
{
  char *name;
  char *desc;
  int priority;
  vnet_crypto_key_handler_t *key_op_handler;
  vnet_crypto_ops_handler_t *ops_handlers[VNET_CRYPTO_N_OP_IDS];
    vnet_crypto_chained_ops_handler_t
    * chained_ops_handlers[VNET_CRYPTO_N_OP_IDS];
} vnet_crypto_engine_t;

typedef struct
{
  vnet_crypto_alg_data_t *algs;
  vnet_crypto_thread_t *threads;
  vnet_crypto_ops_handler_t **ops_handlers;
  vnet_crypto_chained_ops_handler_t **chained_ops_handlers;
  vnet_crypto_op_data_t opt_data[VNET_CRYPTO_N_OP_IDS];
  vnet_crypto_engine_t *engines;
  vnet_crypto_key_t *keys;
  uword *engine_index_by_name;
  uword *alg_index_by_name;
} vnet_crypto_main_t;

extern vnet_crypto_main_t crypto_main;

u32 vnet_crypto_submit_ops (vlib_main_t * vm, vnet_crypto_op_t ** jobs,
			    u32 n_jobs);

u32 vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
				     vnet_crypto_op_chunk_t * chunks,
				     u32 n_ops);
u32 vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
			     u32 n_ops);

int vnet_crypto_set_handler2 (char *ops_handler_name, char *engine,
			      crypto_op_class_type_t oct);
int vnet_crypto_is_set_handler (vnet_crypto_alg_t alg);

u32 vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg,
			 u8 * data, u16 length);
void vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index);
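
/*
 * Caller sketch (illustrative; key_data, iv, aad, tag, payload and the
 * related lengths are hypothetical placeholders assumed to be set up by
 * the caller):
 *
 *   u32 ki = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_AES_128_GCM,
 *                                 key_data, 16);
 *   vnet_crypto_op_t op;
 *   vnet_crypto_op_init (&op, VNET_CRYPTO_OP_AES_128_GCM_ENC);
 *   op.key_index = ki;
 *   op.iv = iv;
 *   op.aad = aad;
 *   op.aad_len = aad_len;
 *   op.tag = tag;
 *   op.tag_len = 16;
 *   op.src = op.dst = payload;
 *   op.len = payload_len;
 *   vnet_crypto_process_ops (vm, &op, 1);
 *
 * For chained buffers, fill a vnet_crypto_op_chunk_t vector instead of
 * src/dst/len and call vnet_crypto_process_chained_ops (vm, &op, chunks, 1).
 */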

format_function_t format_vnet_crypto_alg;
format_function_t format_vnet_crypto_engine;
format_function_t format_vnet_crypto_op;
format_function_t format_vnet_crypto_op_type;
format_function_t format_vnet_crypto_op_status;
unformat_function_t unformat_vnet_crypto_alg;

static_always_inline void
vnet_crypto_op_init (vnet_crypto_op_t * op, vnet_crypto_op_id_t type)
{
  if (CLIB_DEBUG > 0)
    clib_memset (op, 0xfe, sizeof (*op));
  op->op = type;
  op->flags = 0;
  op->key_index = ~0;
  op->n_chunks = 0;
}

static_always_inline vnet_crypto_op_type_t
vnet_crypto_get_op_type (vnet_crypto_op_id_t id)
{
  vnet_crypto_main_t *cm = &crypto_main;
  ASSERT (id < VNET_CRYPTO_N_OP_IDS);
  vnet_crypto_op_data_t *od = cm->opt_data + id;
  return od->type;
}

static_always_inline vnet_crypto_key_t *
vnet_crypto_get_key (vnet_crypto_key_index_t index)
{
  vnet_crypto_main_t *cm = &crypto_main;
  return vec_elt_at_index (cm->keys, index);
}

static_always_inline int
vnet_crypto_set_handler (char *alg_name, char *engine)
{
  return vnet_crypto_set_handler2 (alg_name, engine, CRYPTO_OP_BOTH);
}
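
/* For example, vnet_crypto_set_handler ("aes-128-gcm", "openssl") would make
 * an engine registered under the name "openssl" (assuming such an engine
 * exists) handle both simple and chained aes-128-gcm operations; engine
 * names are whatever was passed to vnet_crypto_register_engine. */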

#endif /* included_vnet_crypto_crypto_h */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */