/*
 *------------------------------------------------------------------
 * Copyright (c) 2019 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *------------------------------------------------------------------
 */
#include <openssl/evp.h>
#include <openssl/hmac.h>
#include <openssl/rand.h>

#include <vlib/vlib.h>
#include <vnet/plugin/plugin.h>
#include <vnet/crypto/crypto.h>
#include <vpp/app/version.h>

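/* Per-thread OpenSSL state: each VPP worker gets its own cipher and HMAC
   contexts, cache-line aligned so workers do not share cache lines. */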
typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  EVP_CIPHER_CTX *evp_cipher_ctx;
  HMAC_CTX *hmac_ctx;
#if OPENSSL_VERSION_NUMBER < 0x10100000L
  HMAC_CTX _hmac_ctx;
#endif
} openssl_per_thread_data_t;

static openssl_per_thread_data_t *per_thread_data = 0;

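/* Cipher table: _(handler suffix, VPP algorithm name, EVP cipher).  The
   DES/3DES and CTR entries reuse the generic "cbc" handler since they follow
   the same EVP init/update/final call sequence. */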
#define foreach_openssl_aes_evp_op \
  _(cbc, DES_CBC, EVP_des_cbc) \
  _(cbc, 3DES_CBC, EVP_des_ede3_cbc) \
  _(cbc, AES_128_CBC, EVP_aes_128_cbc) \
  _(cbc, AES_192_CBC, EVP_aes_192_cbc) \
  _(cbc, AES_256_CBC, EVP_aes_256_cbc) \
  _(gcm, AES_128_GCM, EVP_aes_128_gcm) \
  _(gcm, AES_192_GCM, EVP_aes_192_gcm) \
  _(gcm, AES_256_GCM, EVP_aes_256_gcm) \
  _(cbc, AES_128_CTR, EVP_aes_128_ctr) \
  _(cbc, AES_192_CTR, EVP_aes_192_ctr) \
  _(cbc, AES_256_CTR, EVP_aes_256_ctr)

#define foreach_openssl_chacha20_evp_op \
  _(chacha20_poly1305, CHACHA20_POLY1305, EVP_chacha20_poly1305)

#if OPENSSL_VERSION_NUMBER >= 0x10100000L
#define foreach_openssl_evp_op foreach_openssl_aes_evp_op \
                               foreach_openssl_chacha20_evp_op
#else
#define foreach_openssl_evp_op foreach_openssl_aes_evp_op
#endif

#ifndef EVP_CTRL_AEAD_GET_TAG
#define EVP_CTRL_AEAD_GET_TAG EVP_CTRL_GCM_GET_TAG
#endif

#ifndef EVP_CTRL_AEAD_SET_TAG
#define EVP_CTRL_AEAD_SET_TAG EVP_CTRL_GCM_SET_TAG
#endif

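/* HMAC table: _(VPP digest name, EVP message digest). */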
#define foreach_openssl_hmac_op \
  _(MD5, EVP_md5) \
  _(SHA1, EVP_sha1) \
  _(SHA224, EVP_sha224) \
  _(SHA256, EVP_sha256) \
  _(SHA384, EVP_sha384) \
  _(SHA512, EVP_sha512)

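/* Encrypt a vector of CBC/CTR/DES ops.  Chained-buffer ops are gathered into
   a contiguous on-stack buffer (up to 5 default-size buffers), encrypted in
   one pass, then scattered back to the destination chunks. */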
static_always_inline u32
openssl_ops_enc_cbc (vlib_main_t * vm, vnet_crypto_op_t * ops[],
                     vnet_crypto_op_chunk_t * chunks, u32 n_ops,
                     const EVP_CIPHER * cipher)
{
  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
                                                     vm->thread_index);
  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
  vnet_crypto_op_chunk_t *chp;
  u32 i, j, curr_len = 0;
  u8 out_buf[VLIB_BUFFER_DEFAULT_DATA_SIZE * 5];

  for (i = 0; i < n_ops; i++)
    {
      vnet_crypto_op_t *op = ops[i];
      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
      int out_len = 0;
      int iv_len;

      if (op->op == VNET_CRYPTO_OP_3DES_CBC_ENC
          || op->op == VNET_CRYPTO_OP_DES_CBC_ENC)
        iv_len = 8;
      else
        iv_len = 16;

      if (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV)
        RAND_bytes (op->iv, iv_len);

      EVP_EncryptInit_ex (ctx, cipher, NULL, key->data, op->iv);

      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        EVP_CIPHER_CTX_set_padding (ctx, 0);

      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        {
          chp = chunks + op->chunk_index;
          u32 offset = 0;
          for (j = 0; j < op->n_chunks; j++)
            {
              EVP_EncryptUpdate (ctx, out_buf + offset, &out_len, chp->src,
                                 chp->len);
              curr_len = chp->len;
              offset += out_len;
              chp += 1;
            }
          if (out_len < curr_len)
            EVP_EncryptFinal_ex (ctx, out_buf + offset, &out_len);

          offset = 0;
          chp = chunks + op->chunk_index;
          for (j = 0; j < op->n_chunks; j++)
            {
              clib_memcpy_fast (chp->dst, out_buf + offset, chp->len);
              offset += chp->len;
              chp += 1;
            }
        }
      else
        {
          EVP_EncryptUpdate (ctx, op->dst, &out_len, op->src, op->len);
          if (out_len < op->len)
            EVP_EncryptFinal_ex (ctx, op->dst + out_len, &out_len);
        }
      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
    }
  return n_ops;
}

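/* Decrypt counterpart of openssl_ops_enc_cbc; chained buffers use the same
   gather/scatter scheme through the on-stack buffer. */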
static_always_inline u32
openssl_ops_dec_cbc (vlib_main_t * vm, vnet_crypto_op_t * ops[],
                     vnet_crypto_op_chunk_t * chunks, u32 n_ops,
                     const EVP_CIPHER * cipher)
{
  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
                                                     vm->thread_index);
  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
  vnet_crypto_op_chunk_t *chp;
  u32 i, j, curr_len = 0;
  u8 out_buf[VLIB_BUFFER_DEFAULT_DATA_SIZE * 5];

  for (i = 0; i < n_ops; i++)
    {
      vnet_crypto_op_t *op = ops[i];
      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
      int out_len = 0;

      EVP_DecryptInit_ex (ctx, cipher, NULL, key->data, op->iv);

      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        EVP_CIPHER_CTX_set_padding (ctx, 0);

      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        {
          chp = chunks + op->chunk_index;
          u32 offset = 0;
          for (j = 0; j < op->n_chunks; j++)
            {
              EVP_DecryptUpdate (ctx, out_buf + offset, &out_len, chp->src,
                                 chp->len);
              curr_len = chp->len;
              offset += out_len;
              chp += 1;
            }
          if (out_len < curr_len)
            EVP_DecryptFinal_ex (ctx, out_buf + offset, &out_len);

          offset = 0;
          chp = chunks + op->chunk_index;
          for (j = 0; j < op->n_chunks; j++)
            {
              clib_memcpy_fast (chp->dst, out_buf + offset, chp->len);
              offset += chp->len;
              chp += 1;
            }
        }
      else
        {
          EVP_DecryptUpdate (ctx, op->dst, &out_len, op->src, op->len);
          if (out_len < op->len)
            EVP_DecryptFinal_ex (ctx, op->dst + out_len, &out_len);
        }
      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
    }
  return n_ops;
}

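/* AEAD (AES-GCM / chacha20-poly1305) encrypt: feed the AAD first, then the
   payload, and finally extract the authentication tag into op->tag. */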
static_always_inline u32
openssl_ops_enc_aead (vlib_main_t * vm, vnet_crypto_op_t * ops[],
                      vnet_crypto_op_chunk_t * chunks, u32 n_ops,
                      const EVP_CIPHER * cipher, int is_gcm)
{
  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
                                                     vm->thread_index);
  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
  vnet_crypto_op_chunk_t *chp;
  u32 i, j;
  for (i = 0; i < n_ops; i++)
    {
      vnet_crypto_op_t *op = ops[i];
      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
      int len = 0;

      if (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV)
        RAND_bytes (op->iv, 8);

      EVP_EncryptInit_ex (ctx, cipher, 0, 0, 0);
      if (is_gcm)
        EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, NULL);
      EVP_EncryptInit_ex (ctx, 0, 0, key->data, op->iv);
      if (op->aad_len)
        EVP_EncryptUpdate (ctx, NULL, &len, op->aad, op->aad_len);
      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        {
          chp = chunks + op->chunk_index;
          for (j = 0; j < op->n_chunks; j++)
            {
              EVP_EncryptUpdate (ctx, chp->dst, &len, chp->src, chp->len);
              chp += 1;
            }
        }
      else
        EVP_EncryptUpdate (ctx, op->dst, &len, op->src, op->len);
      EVP_EncryptFinal_ex (ctx, op->dst + len, &len);
      EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_AEAD_GET_TAG, op->tag_len, op->tag);
      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
    }
  return n_ops;
}

static_always_inline u32
openssl_ops_enc_gcm (vlib_main_t * vm, vnet_crypto_op_t * ops[],
                     vnet_crypto_op_chunk_t * chunks, u32 n_ops,
                     const EVP_CIPHER * cipher)
{
  return openssl_ops_enc_aead (vm, ops, chunks, n_ops, cipher,
                               /* is_gcm */ 1);
}

static_always_inline u32
openssl_ops_enc_chacha20_poly1305 (vlib_main_t * vm, vnet_crypto_op_t * ops[],
                                   vnet_crypto_op_chunk_t * chunks, u32 n_ops,
                                   const EVP_CIPHER * cipher)
{
  return openssl_ops_enc_aead (vm, ops, chunks, n_ops, cipher,
                               /* is_gcm */ 0);
}

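/* AEAD decrypt: the expected tag is installed before EVP_DecryptFinal_ex,
   whose return value reports whether authentication succeeded. */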
static_always_inline u32
openssl_ops_dec_aead (vlib_main_t * vm, vnet_crypto_op_t * ops[],
                      vnet_crypto_op_chunk_t * chunks, u32 n_ops,
                      const EVP_CIPHER * cipher, int is_gcm)
{
  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
                                                     vm->thread_index);
  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
  vnet_crypto_op_chunk_t *chp;
  u32 i, j, n_fail = 0;
  for (i = 0; i < n_ops; i++)
    {
      vnet_crypto_op_t *op = ops[i];
      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
      int len = 0;

      EVP_DecryptInit_ex (ctx, cipher, 0, 0, 0);
      if (is_gcm)
        EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, 0);
      EVP_DecryptInit_ex (ctx, 0, 0, key->data, op->iv);
      if (op->aad_len)
        EVP_DecryptUpdate (ctx, 0, &len, op->aad, op->aad_len);
      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        {
          chp = chunks + op->chunk_index;
          for (j = 0; j < op->n_chunks; j++)
            {
              EVP_DecryptUpdate (ctx, chp->dst, &len, chp->src, chp->len);
              chp += 1;
            }
        }
      else
        EVP_DecryptUpdate (ctx, op->dst, &len, op->src, op->len);
      EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_AEAD_SET_TAG, op->tag_len, op->tag);

      if (EVP_DecryptFinal_ex (ctx, op->dst + len, &len) > 0)
        op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
      else
        {
          n_fail++;
          op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
        }
    }
  return n_ops - n_fail;
}

static_always_inline u32
openssl_ops_dec_gcm (vlib_main_t * vm, vnet_crypto_op_t * ops[],
                     vnet_crypto_op_chunk_t * chunks, u32 n_ops,
                     const EVP_CIPHER * cipher)
{
  return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
                               /* is_gcm */ 1);
}

static_always_inline u32
openssl_ops_dec_chacha20_poly1305 (vlib_main_t * vm, vnet_crypto_op_t * ops[],
                                   vnet_crypto_op_chunk_t * chunks, u32 n_ops,
                                   const EVP_CIPHER * cipher)
{
  return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
                               /* is_gcm */ 0);
}

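/* HMAC: compute (or, with VNET_CRYPTO_OP_FLAG_HMAC_CHECK, verify) a digest,
   truncated to op->digest_len when the op requests it. */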
static_always_inline u32
openssl_ops_hmac (vlib_main_t * vm, vnet_crypto_op_t * ops[],
                  vnet_crypto_op_chunk_t * chunks, u32 n_ops,
                  const EVP_MD * md)
{
  u8 buffer[64];
  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
                                                     vm->thread_index);
  HMAC_CTX *ctx = ptd->hmac_ctx;
  vnet_crypto_op_chunk_t *chp;
  u32 i, j, n_fail = 0;
  for (i = 0; i < n_ops; i++)
    {
      vnet_crypto_op_t *op = ops[i];
      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
      unsigned int out_len = 0;
      size_t sz = op->digest_len ? op->digest_len : EVP_MD_size (md);

      HMAC_Init_ex (ctx, key->data, vec_len (key->data), md, NULL);
      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        {
          chp = chunks + op->chunk_index;
          for (j = 0; j < op->n_chunks; j++)
            {
              HMAC_Update (ctx, chp->src, chp->len);
              chp += 1;
            }
        }
      else
        HMAC_Update (ctx, op->src, op->len);
      HMAC_Final (ctx, buffer, &out_len);

      if (op->flags & VNET_CRYPTO_OP_FLAG_HMAC_CHECK)
        {
          if ((memcmp (op->digest, buffer, sz)))
            {
              n_fail++;
              op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
              continue;
            }
        }
      else
        clib_memcpy_fast (op->digest, buffer, sz);
      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
    }
  return n_ops - n_fail;
}

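/* Expand one plain and one chained-buffer entry point per algorithm; these
   are the handlers registered with the crypto infra in crypto_openssl_init. */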
#define _(m, a, b) \
static u32 \
openssl_ops_enc_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
{ return openssl_ops_enc_##m (vm, ops, 0, n_ops, b ()); } \
 \
static u32 \
openssl_ops_dec_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
{ return openssl_ops_dec_##m (vm, ops, 0, n_ops, b ()); } \
 \
static u32 \
openssl_ops_enc_chained_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
                             vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
{ return openssl_ops_enc_##m (vm, ops, chunks, n_ops, b ()); } \
 \
static u32 \
openssl_ops_dec_chained_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
                             vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
{ return openssl_ops_dec_##m (vm, ops, chunks, n_ops, b ()); }

foreach_openssl_evp_op;
#undef _

#define _(a, b) \
static u32 \
openssl_ops_hmac_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
{ return openssl_ops_hmac (vm, ops, 0, n_ops, b ()); } \
static u32 \
openssl_ops_hmac_chained_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
                              vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
{ return openssl_ops_hmac (vm, ops, chunks, n_ops, b ()); }

foreach_openssl_hmac_op;
#undef _

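/* Engine init: register the "openssl" engine at priority 50, register the
   handlers generated above for every op in the tables, allocate per-thread
   contexts, and seed OpenSSL's RNG. */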
clib_error_t *
crypto_openssl_init (vlib_main_t * vm)
{
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  openssl_per_thread_data_t *ptd;
  u8 *seed_data = 0;
  time_t t;
  pid_t pid;

  u32 eidx = vnet_crypto_register_engine (vm, "openssl", 50, "OpenSSL");

#define _(m, a, b) \
  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
                                     openssl_ops_enc_##a, \
                                     openssl_ops_enc_chained_##a); \
  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
                                     openssl_ops_dec_##a, \
                                     openssl_ops_dec_chained_##a); \

  foreach_openssl_evp_op;
#undef _

#define _(a, b) \
  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_HMAC, \
                                     openssl_ops_hmac_##a, \
                                     openssl_ops_hmac_chained_##a); \

  foreach_openssl_hmac_op;
#undef _

  vec_validate_aligned (per_thread_data, tm->n_vlib_mains - 1,
                        CLIB_CACHE_LINE_BYTES);

  vec_foreach (ptd, per_thread_data)
  {
    ptd->evp_cipher_ctx = EVP_CIPHER_CTX_new ();
#if OPENSSL_VERSION_NUMBER >= 0x10100000L
    ptd->hmac_ctx = HMAC_CTX_new ();
#else
    HMAC_CTX_init (&(ptd->_hmac_ctx));
    ptd->hmac_ctx = &ptd->_hmac_ctx;
#endif
  }

  t = time (NULL);
  pid = getpid ();
  vec_add (seed_data, &t, sizeof (t));
  vec_add (seed_data, &pid, sizeof (pid));
  vec_add (seed_data, seed_data, sizeof (seed_data));

  RAND_seed ((const void *) seed_data, vec_len (seed_data));

  vec_free (seed_data);

  return 0;
}

VLIB_INIT_FUNCTION (crypto_openssl_init) =
{
  .runs_after = VLIB_INITS ("vnet_crypto_init"),
};

VLIB_PLUGIN_REGISTER () = {
  .version = VPP_BUILD_VER,
  .description = "OpenSSL Crypto Engine",
};

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */