/*
 *------------------------------------------------------------------
3 * Copyright (c) 2019 Cisco and/or its affiliates.
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at:
8 * http://www.apache.org/licenses/LICENSE-2.0
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
 *------------------------------------------------------------------
 */
18 #include <openssl/evp.h>
19 #include <openssl/hmac.h>
20 #include <openssl/rand.h>
21 #include <openssl/sha.h>
23 #include <vlib/vlib.h>
24 #include <vnet/plugin/plugin.h>
25 #include <vnet/crypto/crypto.h>
26 #include <vpp/app/version.h>
/* Per-worker-thread OpenSSL context state, one element per VPP thread.
   NOTE(review): the opening `typedef struct` line and the hmac_ctx /
   hash_ctx members (dereferenced below as ptd->hmac_ctx / ptd->hash_ctx)
   are not visible in this chunk — confirm against the full file. */
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0); /* align to a cache line so workers do not false-share */
  EVP_CIPHER_CTX *evp_cipher_ctx;	/* reusable cipher context for this thread */
#if OPENSSL_VERSION_NUMBER < 0x10100000L
  /* pre-1.1.0 OpenSSL has no HMAC_CTX_new(); presumably an inline
     _hmac_ctx member lives here (see the init function below) — verify */
} openssl_per_thread_data_t;

/* vector of per-thread contexts, indexed by vm->thread_index */
static openssl_per_thread_data_t *per_thread_data = 0;
/* Cipher op table: _(handler suffix, VNET_CRYPTO alg id, EVP cipher ctor,
   IV length in bytes).  CTR entries reuse the `cbc` handler because both
   are plain EVP Update/Final streams with an explicit IV. */
#define foreach_openssl_aes_evp_op \
  _ (cbc, DES_CBC, EVP_des_cbc, 8) \
  _ (cbc, 3DES_CBC, EVP_des_ede3_cbc, 8) \
  _ (cbc, AES_128_CBC, EVP_aes_128_cbc, 16) \
  _ (cbc, AES_192_CBC, EVP_aes_192_cbc, 16) \
  _ (cbc, AES_256_CBC, EVP_aes_256_cbc, 16) \
  _ (gcm, AES_128_GCM, EVP_aes_128_gcm, 8) \
  _ (gcm, AES_192_GCM, EVP_aes_192_gcm, 8) \
  _ (gcm, AES_256_GCM, EVP_aes_256_gcm, 8) \
  _ (cbc, AES_128_CTR, EVP_aes_128_ctr, 8) \
  _ (cbc, AES_192_CTR, EVP_aes_192_ctr, 8) \
  _ (cbc, AES_256_CTR, EVP_aes_256_ctr, 8)

/* chacha20-poly1305 requires OpenSSL >= 1.1.0, hence the version gate */
#define foreach_openssl_chacha20_evp_op \
  _ (chacha20_poly1305, CHACHA20_POLY1305, EVP_chacha20_poly1305, 8)

#if OPENSSL_VERSION_NUMBER >= 0x10100000L
#define foreach_openssl_evp_op foreach_openssl_aes_evp_op \
			       foreach_openssl_chacha20_evp_op
/* NOTE(review): the #else/#endif lines are not visible in this chunk */
#define foreach_openssl_evp_op foreach_openssl_aes_evp_op

/* Older OpenSSL only has the GCM-specific tag ctrl names; map the generic
   AEAD names onto them so one code path serves all versions. */
#ifndef EVP_CTRL_AEAD_GET_TAG
#define EVP_CTRL_AEAD_GET_TAG EVP_CTRL_GCM_GET_TAG
#ifndef EVP_CTRL_AEAD_SET_TAG
#define EVP_CTRL_AEAD_SET_TAG EVP_CTRL_GCM_SET_TAG

/* Plain-hash op table: _(VNET_CRYPTO alg id, EVP digest ctor).
   NOTE(review): earlier entries (e.g. SHA1) may exist on lines not
   visible in this chunk. */
#define foreach_openssl_hash_op \
  _ (SHA224, EVP_sha224) \
  _ (SHA256, EVP_sha256) \
  _ (SHA384, EVP_sha384) \
  _ (SHA512, EVP_sha512)

/* HMAC op table — same shape as the hash table above.
   NOTE(review): first/last entries (MD5/SHA1/SHA512) appear to be on
   lines missing from this chunk. */
#define foreach_openssl_hmac_op \
  _(SHA224, EVP_sha224) \
  _(SHA256, EVP_sha256) \
  _(SHA384, EVP_sha384) \
/* Encrypt n_ops buffers with an EVP stream/block cipher (CBC or CTR).
   Chained-buffer ops are encrypted into a stack bounce buffer first and the
   ciphertext is then scattered back to the destination chunks.  Returns the
   number of ops processed; this path marks every op COMPLETED.
   NOTE(review): several physical lines (braces, out_len/offset declarations,
   chunk-pointer advancement) are missing from this chunk of the file. */
static_always_inline u32
openssl_ops_enc_cbc (vlib_main_t *vm, vnet_crypto_op_t *ops[],
		     vnet_crypto_op_chunk_t *chunks, u32 n_ops,
		     const EVP_CIPHER *cipher, const int iv_len)
  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
  vnet_crypto_op_chunk_t *chp;
  u32 i, j, curr_len = 0;
  /* bounce buffer sized for a chained op spanning up to 5 buffers */
  u8 out_buf[VLIB_BUFFER_DEFAULT_DATA_SIZE * 5];

  for (i = 0; i < n_ops; i++)
      vnet_crypto_op_t *op = ops[i];
      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);

      /* caller asked the engine to generate a fresh random IV */
      if (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV)
	RAND_bytes (op->iv, iv_len);

      EVP_EncryptInit_ex (ctx, cipher, NULL, key->data, op->iv);

      /* chained path feeds partial chunks through Update; disable padding
	 so the context never buffers a trailing partial block */
      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
	EVP_CIPHER_CTX_set_padding (ctx, 0);

      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
	  /* pass 1: gather-encrypt all chunks into the bounce buffer */
	  chp = chunks + op->chunk_index;
	  for (j = 0; j < op->n_chunks; j++)
	      EVP_EncryptUpdate (ctx, out_buf + offset, &out_len, chp->src,
	  /* flush whatever the cipher context still holds */
	  if (out_len < curr_len)
	    EVP_EncryptFinal_ex (ctx, out_buf + offset, &out_len);
	  /* pass 2: scatter ciphertext back to the destination chunks */
	  chp = chunks + op->chunk_index;
	  for (j = 0; j < op->n_chunks; j++)
	      clib_memcpy_fast (chp->dst, out_buf + offset, chp->len);
	/* single contiguous buffer: encrypt straight into op->dst */
	EVP_EncryptUpdate (ctx, op->dst, &out_len, op->src, op->len);
      if (out_len < op->len)
	EVP_EncryptFinal_ex (ctx, op->dst + out_len, &out_len);

      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
/* Decrypt n_ops buffers — mirror image of openssl_ops_enc_cbc, but the IV
   is always caller-supplied (never generated here).  Marks every op
   COMPLETED; CBC/CTR decryption has no per-op failure mode in this path.
   NOTE(review): braces and the out_len/offset declarations are on lines
   missing from this chunk. */
static_always_inline u32
openssl_ops_dec_cbc (vlib_main_t *vm, vnet_crypto_op_t *ops[],
		     vnet_crypto_op_chunk_t *chunks, u32 n_ops,
		     const EVP_CIPHER *cipher, const int iv_len)
  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
  vnet_crypto_op_chunk_t *chp;
  u32 i, j, curr_len = 0;
  /* bounce buffer for gather-decrypt of chained ops */
  u8 out_buf[VLIB_BUFFER_DEFAULT_DATA_SIZE * 5];

  for (i = 0; i < n_ops; i++)
      vnet_crypto_op_t *op = ops[i];
      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);

      EVP_DecryptInit_ex (ctx, cipher, NULL, key->data, op->iv);

      /* no padding on the chained path — see enc variant for rationale */
      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
	EVP_CIPHER_CTX_set_padding (ctx, 0);

      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
	  /* pass 1: decrypt all chunks into the bounce buffer */
	  chp = chunks + op->chunk_index;
	  for (j = 0; j < op->n_chunks; j++)
	      EVP_DecryptUpdate (ctx, out_buf + offset, &out_len, chp->src,
	  /* flush remainder held by the cipher context */
	  if (out_len < curr_len)
	    EVP_DecryptFinal_ex (ctx, out_buf + offset, &out_len);
	  /* pass 2: scatter plaintext back to destination chunks */
	  chp = chunks + op->chunk_index;
	  for (j = 0; j < op->n_chunks; j++)
	      clib_memcpy_fast (chp->dst, out_buf + offset, chp->len);
	/* single contiguous buffer path */
	EVP_DecryptUpdate (ctx, op->dst, &out_len, op->src, op->len);
      if (out_len < op->len)
	EVP_DecryptFinal_ex (ctx, op->dst + out_len, &out_len);

      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
/* AEAD (GCM / chacha20-poly1305) encrypt: feeds AAD first, then the
   payload, and extracts the authentication tag into op->tag.  Encryption
   cannot fail per-op here, so all ops are marked COMPLETED.
   NOTE(review): the declarations of i/j/len, the `if (is_gcm)` guard that
   presumably gates the SET_IVLEN ctrl, and several braces are on lines
   missing from this chunk — confirm against the full file. */
static_always_inline u32
openssl_ops_enc_aead (vlib_main_t *vm, vnet_crypto_op_t *ops[],
		      vnet_crypto_op_chunk_t *chunks, u32 n_ops,
		      const EVP_CIPHER *cipher, int is_gcm, const int iv_len)
  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
  vnet_crypto_op_chunk_t *chp;
  for (i = 0; i < n_ops; i++)
      vnet_crypto_op_t *op = ops[i];
      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);

      /* NOTE(review): only 8 bytes of IV are randomized regardless of
	 iv_len — presumably the remaining nonce bytes come from a salt /
	 counter (ESP-style); upstream later parameterized this.  Verify. */
      if (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV)
	RAND_bytes (op->iv, 8);

      /* two-step init: select cipher first so the IV-length ctrl below is
	 accepted, then install key + IV */
      EVP_EncryptInit_ex (ctx, cipher, 0, 0, 0);
      EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, NULL);
      EVP_EncryptInit_ex (ctx, 0, 0, key->data, op->iv);

      /* AAD pass: NULL output means "authenticate only, don't encrypt" */
      EVP_EncryptUpdate (ctx, NULL, &len, op->aad, op->aad_len);
      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
	  chp = chunks + op->chunk_index;
	  for (j = 0; j < op->n_chunks; j++)
	      EVP_EncryptUpdate (ctx, chp->dst, &len, chp->src, chp->len);
	/* single contiguous buffer */
	EVP_EncryptUpdate (ctx, op->dst, &len, op->src, op->len);
      EVP_EncryptFinal_ex (ctx, op->dst + len, &len);
      /* pull the computed auth tag out of the context */
      EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_AEAD_GET_TAG, op->tag_len, op->tag);
      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
249 static_always_inline u32
250 openssl_ops_enc_gcm (vlib_main_t *vm, vnet_crypto_op_t *ops[],
251 vnet_crypto_op_chunk_t *chunks, u32 n_ops,
252 const EVP_CIPHER *cipher, const int iv_len)
254 return openssl_ops_enc_aead (vm, ops, chunks, n_ops, cipher,
255 /* is_gcm */ 1, iv_len);
258 static_always_inline __clib_unused u32
259 openssl_ops_enc_chacha20_poly1305 (vlib_main_t *vm, vnet_crypto_op_t *ops[],
260 vnet_crypto_op_chunk_t *chunks, u32 n_ops,
261 const EVP_CIPHER *cipher, const int iv_len)
263 return openssl_ops_enc_aead (vm, ops, chunks, n_ops, cipher,
264 /* is_gcm */ 0, iv_len);
267 static_always_inline u32
268 openssl_ops_dec_aead (vlib_main_t *vm, vnet_crypto_op_t *ops[],
269 vnet_crypto_op_chunk_t *chunks, u32 n_ops,
270 const EVP_CIPHER *cipher, int is_gcm, const int iv_len)
272 openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
274 EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
275 vnet_crypto_op_chunk_t *chp;
276 u32 i, j, n_fail = 0;
277 for (i = 0; i < n_ops; i++)
279 vnet_crypto_op_t *op = ops[i];
280 vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
283 EVP_DecryptInit_ex (ctx, cipher, 0, 0, 0);
285 EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, 0);
286 EVP_DecryptInit_ex (ctx, 0, 0, key->data, op->iv);
288 EVP_DecryptUpdate (ctx, 0, &len, op->aad, op->aad_len);
289 if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
291 chp = chunks + op->chunk_index;
292 for (j = 0; j < op->n_chunks; j++)
294 EVP_DecryptUpdate (ctx, chp->dst, &len, chp->src, chp->len);
299 EVP_DecryptUpdate (ctx, op->dst, &len, op->src, op->len);
300 EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_AEAD_SET_TAG, op->tag_len, op->tag);
302 if (EVP_DecryptFinal_ex (ctx, op->dst + len, &len) > 0)
303 op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
307 op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
310 return n_ops - n_fail;
313 static_always_inline u32
314 openssl_ops_dec_gcm (vlib_main_t *vm, vnet_crypto_op_t *ops[],
315 vnet_crypto_op_chunk_t *chunks, u32 n_ops,
316 const EVP_CIPHER *cipher, const int iv_len)
318 return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
319 /* is_gcm */ 1, iv_len);
322 static_always_inline __clib_unused u32
323 openssl_ops_dec_chacha20_poly1305 (vlib_main_t *vm, vnet_crypto_op_t *ops[],
324 vnet_crypto_op_chunk_t *chunks, u32 n_ops,
325 const EVP_CIPHER *cipher, const int iv_len)
327 return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
328 /* is_gcm */ 0, iv_len);
331 static_always_inline u32
332 openssl_ops_hash (vlib_main_t *vm, vnet_crypto_op_t *ops[],
333 vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_MD *md)
335 openssl_per_thread_data_t *ptd =
336 vec_elt_at_index (per_thread_data, vm->thread_index);
337 EVP_MD_CTX *ctx = ptd->hash_ctx;
338 vnet_crypto_op_chunk_t *chp;
339 u32 md_len, i, j, n_fail = 0;
341 for (i = 0; i < n_ops; i++)
343 vnet_crypto_op_t *op = ops[i];
345 EVP_DigestInit_ex (ctx, md, NULL);
346 if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
348 chp = chunks + op->chunk_index;
349 for (j = 0; j < op->n_chunks; j++)
351 EVP_DigestUpdate (ctx, chp->src, chp->len);
356 EVP_DigestUpdate (ctx, op->src, op->len);
358 EVP_DigestFinal_ex (ctx, op->digest, &md_len);
359 op->digest_len = md_len;
360 op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
362 return n_ops - n_fail;
365 static_always_inline u32
366 openssl_ops_hmac (vlib_main_t * vm, vnet_crypto_op_t * ops[],
367 vnet_crypto_op_chunk_t * chunks, u32 n_ops,
371 openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
373 HMAC_CTX *ctx = ptd->hmac_ctx;
374 vnet_crypto_op_chunk_t *chp;
375 u32 i, j, n_fail = 0;
376 for (i = 0; i < n_ops; i++)
378 vnet_crypto_op_t *op = ops[i];
379 vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
380 unsigned int out_len = 0;
381 size_t sz = op->digest_len ? op->digest_len : EVP_MD_size (md);
383 HMAC_Init_ex (ctx, key->data, vec_len (key->data), md, NULL);
384 if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
386 chp = chunks + op->chunk_index;
387 for (j = 0; j < op->n_chunks; j++)
389 HMAC_Update (ctx, chp->src, chp->len);
394 HMAC_Update (ctx, op->src, op->len);
395 HMAC_Final (ctx, buffer, &out_len);
397 if (op->flags & VNET_CRYPTO_OP_FLAG_HMAC_CHECK)
399 if ((memcmp (op->digest, buffer, sz)))
402 op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
407 clib_memcpy_fast (op->digest, buffer, sz);
408 op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
410 return n_ops - n_fail;
/* Instantiate concrete (non-inline) enc/dec handlers — plain and chained —
   for every cipher in foreach_openssl_evp_op.  `m` selects the shared
   helper (cbc/gcm/chacha20_poly1305), `a` the alg name, `b` the EVP
   constructor, `iv` the IV length.
   NOTE(review): closing braces of these wrapper bodies and the trailing
   `#undef _` lines are on lines missing from this chunk. */
#define _(m, a, b, iv) \
  static u32 openssl_ops_enc_##a (vlib_main_t *vm, vnet_crypto_op_t *ops[], \
    return openssl_ops_enc_##m (vm, ops, 0, n_ops, b (), iv); \
  u32 openssl_ops_dec_##a (vlib_main_t *vm, vnet_crypto_op_t *ops[], \
    return openssl_ops_dec_##m (vm, ops, 0, n_ops, b (), iv); \
  static u32 openssl_ops_enc_chained_##a ( \
    vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, \
    return openssl_ops_enc_##m (vm, ops, chunks, n_ops, b (), iv); \
  static u32 openssl_ops_dec_chained_##a ( \
    vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, \
    return openssl_ops_dec_##m (vm, ops, chunks, n_ops, b (), iv); \
foreach_openssl_evp_op;

/* Same pattern for plain-hash handlers (the re-#define of `_` for the
   two-argument hash shape is on a line missing from this chunk). */
  static u32 openssl_ops_hash_##a (vlib_main_t *vm, vnet_crypto_op_t *ops[], \
    return openssl_ops_hash (vm, ops, 0, n_ops, b ()); \
  static u32 openssl_ops_hash_chained_##a ( \
    vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, \
    return openssl_ops_hash (vm, ops, chunks, n_ops, b ()); \
foreach_openssl_hash_op;

/* And for HMAC handlers. */
  openssl_ops_hmac_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
  { return openssl_ops_hmac (vm, ops, 0, n_ops, b ()); } \
  openssl_ops_hmac_chained_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
				vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
  { return openssl_ops_hmac (vm, ops, chunks, n_ops, b ()); } \
foreach_openssl_hmac_op;
/* Plugin init: register the "openssl" crypto engine (priority 50) and all
   its op handlers, allocate per-thread OpenSSL contexts, and seed the
   OpenSSL RNG with time/pid-derived data.
   NOTE(review): the `static clib_error_t *` return-type line, the #undef
   lines between handler groups, the seed_data/t/pid declarations, the
   #else/#endif of the version gate, and the final `return 0;` are on
   lines missing from this chunk. */
crypto_openssl_init (vlib_main_t * vm)
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  openssl_per_thread_data_t *ptd;
  /* priority 50: below native/ipsecmb engines, above none */
  u32 eidx = vnet_crypto_register_engine (vm, "openssl", 50, "OpenSSL");

/* register ENC/DEC handlers (simple + chained) for every cipher */
#define _(m, a, b, iv) \
  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
				     openssl_ops_enc_##a, \
				     openssl_ops_enc_chained_##a); \
  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
				     openssl_ops_dec_##a, \
				     openssl_ops_dec_chained_##a);
  foreach_openssl_evp_op;

  /* register HMAC handlers (macro re-#define not visible here) */
  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_HMAC, \
				     openssl_ops_hmac_##a, \
				     openssl_ops_hmac_chained_##a); \
  foreach_openssl_hmac_op;

  /* register plain-hash handlers */
  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_HASH, \
				     openssl_ops_hash_##a, \
				     openssl_ops_hash_chained_##a);
  foreach_openssl_hash_op;

  /* one context set per VPP thread (main + workers), cache-line aligned */
  vec_validate_aligned (per_thread_data, tm->n_vlib_mains - 1,
			CLIB_CACHE_LINE_BYTES);

  vec_foreach (ptd, per_thread_data)
      ptd->evp_cipher_ctx = EVP_CIPHER_CTX_new ();
#if OPENSSL_VERSION_NUMBER >= 0x10100000L
      ptd->hmac_ctx = HMAC_CTX_new ();
      ptd->hash_ctx = EVP_MD_CTX_create ();
      /* pre-1.1.0: HMAC_CTX lives inline in the per-thread struct */
      HMAC_CTX_init (&(ptd->_hmac_ctx));
      ptd->hmac_ctx = &ptd->_hmac_ctx;

  /* mix current time, pid and the vector's own bytes into the RNG seed;
     OpenSSL also seeds itself, this only adds entropy */
  vec_add (seed_data, &t, sizeof (t));
  vec_add (seed_data, &pid, sizeof (pid));
  vec_add (seed_data, seed_data, sizeof (seed_data));

  RAND_seed ((const void *) seed_data, vec_len (seed_data));

  vec_free (seed_data);
/* Run our init only after the generic crypto subsystem is up */
VLIB_INIT_FUNCTION (crypto_openssl_init) =
  .runs_after = VLIB_INITS ("vnet_crypto_init"),

/* Make this plugin discoverable by the VPP plugin loader */
VLIB_PLUGIN_REGISTER () = {
  .version = VPP_BUILD_VER,
  .description = "OpenSSL Crypto Engine",
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */