2 *------------------------------------------------------------------
3 * Copyright (c) 2019 Cisco and/or its affiliates.
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at:
8 * http://www.apache.org/licenses/LICENSE-2.0
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 *------------------------------------------------------------------
18 #include <sys/syscall.h>
20 #include <openssl/evp.h>
21 #include <openssl/hmac.h>
22 #include <openssl/rand.h>
23 #include <openssl/sha.h>
25 #include <vlib/vlib.h>
26 #include <vnet/plugin/plugin.h>
27 #include <vnet/crypto/crypto.h>
28 #include <vpp/app/version.h>
/* Per-worker-thread OpenSSL context state, cache-line aligned so threads
 * do not false-share.  NOTE(review): this extract is truncated — the
 * opening "typedef struct" line and the remaining context members
 * (hmac_ctx/hash_ctx and the pre-1.1.0 _hmac_ctx, assigned in
 * crypto_openssl_init below) are missing from view; the leading numbers
 * on each line are extraction artifacts. */
32 CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
33 EVP_CIPHER_CTX *evp_cipher_ctx;
36 #if OPENSSL_VERSION_NUMBER < 0x10100000L
39 } openssl_per_thread_data_t;
/* Vector with one entry per VPP thread; sized in crypto_openssl_init. */
41 static openssl_per_thread_data_t *per_thread_data = 0;
/* Table of EVP cipher ops: _(handler-suffix, VNET crypto alg, EVP cipher
 * constructor, iv length).  Note the CTR entries reuse the "cbc" handler
 * (plain EVP init/update path), and the NULL_GMAC entries reuse the GCM
 * EVP ciphers with the GMAC (auth-only) handler. */
43 #define foreach_openssl_aes_evp_op \
44 _ (cbc, DES_CBC, EVP_des_cbc, 8) \
45 _ (cbc, 3DES_CBC, EVP_des_ede3_cbc, 8) \
46 _ (cbc, AES_128_CBC, EVP_aes_128_cbc, 16) \
47 _ (cbc, AES_192_CBC, EVP_aes_192_cbc, 16) \
48 _ (cbc, AES_256_CBC, EVP_aes_256_cbc, 16) \
49 _ (gcm, AES_128_GCM, EVP_aes_128_gcm, 8) \
50 _ (gcm, AES_192_GCM, EVP_aes_192_gcm, 8) \
51 _ (gcm, AES_256_GCM, EVP_aes_256_gcm, 8) \
52 _ (cbc, AES_128_CTR, EVP_aes_128_ctr, 8) \
53 _ (cbc, AES_192_CTR, EVP_aes_192_ctr, 8) \
54 _ (cbc, AES_256_CTR, EVP_aes_256_ctr, 8) \
55 _ (null_gmac, AES_128_NULL_GMAC, EVP_aes_128_gcm, 8) \
56 _ (null_gmac, AES_192_NULL_GMAC, EVP_aes_192_gcm, 8) \
57 _ (null_gmac, AES_256_NULL_GMAC, EVP_aes_256_gcm, 8)
/* chacha20-poly1305 is kept in a separate table: it is only compiled in
 * on OpenSSL >= 1.1.0 (see the version check that follows). */
59 #define foreach_openssl_chacha20_evp_op \
60 _ (chacha20_poly1305, CHACHA20_POLY1305, EVP_chacha20_poly1305, 8)
/* Full EVP op table: include chacha20-poly1305 only when the OpenSSL
 * library is new enough (>= 1.1.0) to provide it. */
62 #if OPENSSL_VERSION_NUMBER >= 0x10100000L
63 #define foreach_openssl_evp_op foreach_openssl_aes_evp_op \
64 foreach_openssl_chacha20_evp_op
/* NOTE(review): extraction dropped the #else/#endif lines around here. */
66 #define foreach_openssl_evp_op foreach_openssl_aes_evp_op
/* Compatibility shims: older OpenSSL only has the GCM-specific tag ctrl
 * names; map the generic AEAD names onto them so one code path works on
 * both.  NOTE(review): the matching #endif lines are missing from this
 * extract. */
69 #ifndef EVP_CTRL_AEAD_GET_TAG
70 #define EVP_CTRL_AEAD_GET_TAG EVP_CTRL_GCM_GET_TAG
73 #ifndef EVP_CTRL_AEAD_SET_TAG
74 #define EVP_CTRL_AEAD_SET_TAG EVP_CTRL_GCM_SET_TAG
/* Plain-hash ops supported via EVP message digests: _(alg, EVP md ctor).
 * NOTE(review): extraction appears to have dropped at least one entry
 * (original line 78, likely SHA1) from this table. */
77 #define foreach_openssl_hash_op \
79 _ (SHA224, EVP_sha224) \
80 _ (SHA256, EVP_sha256) \
81 _ (SHA384, EVP_sha384) \
82 _ (SHA512, EVP_sha512)
/* HMAC ops: _(alg, EVP md ctor).  NOTE(review): extraction dropped
 * entries (original lines 85-86, likely MD5/SHA1) and the final SHA512
 * line of this table. */
84 #define foreach_openssl_hmac_op \
87 _(SHA224, EVP_sha224) \
88 _(SHA256, EVP_sha256) \
89 _(SHA384, EVP_sha384) \
/* Encrypt a batch of crypto ops with a plain EVP cipher (CBC/CTR/DES
 * paths).  Marks each op COMPLETED.  NOTE(review): this extract is
 * truncated — braces, the local offset/out_len declarations and several
 * statements are missing; leading numbers are extraction artifacts. */
92 static_always_inline u32
93 openssl_ops_enc_cbc (vlib_main_t *vm, vnet_crypto_op_t *ops[],
94 vnet_crypto_op_chunk_t *chunks, u32 n_ops,
95 const EVP_CIPHER *cipher, const int iv_len)
/* One EVP ctx per worker thread — no locking needed. */
97 openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
99 EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
100 vnet_crypto_op_chunk_t *chp;
101 u32 i, j, curr_len = 0;
/* Scratch output buffer for chained (multi-chunk) operations. */
102 u8 out_buf[VLIB_BUFFER_DEFAULT_DATA_SIZE * 5];
104 for (i = 0; i < n_ops; i++)
106 vnet_crypto_op_t *op = ops[i];
107 vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
/* (Re)key the shared ctx with this op's key and IV. */
110 EVP_EncryptInit_ex (ctx, cipher, NULL, key->data, op->iv);
/* Chained buffers: encrypt chunk-by-chunk into out_buf first... */
112 if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
114 chp = chunks + op->chunk_index;
116 for (j = 0; j < op->n_chunks; j++)
118 EVP_EncryptUpdate (ctx, out_buf + offset, &out_len, chp->src,
/* Flush any block still buffered inside the cipher ctx. */
124 if (out_len < curr_len)
125 EVP_EncryptFinal_ex (ctx, out_buf + offset, &out_len);
/* ...then scatter the ciphertext back into the per-chunk dst buffers. */
128 chp = chunks + op->chunk_index;
129 for (j = 0; j < op->n_chunks; j++)
131 clib_memcpy_fast (chp->dst, out_buf + offset, chp->len);
/* Single-buffer op: encrypt src -> dst in one shot. */
138 EVP_EncryptUpdate (ctx, op->dst, &out_len, op->src, op->len);
139 if (out_len < op->len)
140 EVP_EncryptFinal_ex (ctx, op->dst + out_len, &out_len);
142 op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
/* Decrypt a batch of crypto ops with a plain EVP cipher; mirror image of
 * openssl_ops_enc_cbc above.  Marks each op COMPLETED (CBC decrypt has
 * no integrity check).  NOTE(review): extract is truncated — braces and
 * offset/out_len declarations missing; leading numbers are artifacts. */
147 static_always_inline u32
148 openssl_ops_dec_cbc (vlib_main_t *vm, vnet_crypto_op_t *ops[],
149 vnet_crypto_op_chunk_t *chunks, u32 n_ops,
150 const EVP_CIPHER *cipher, const int iv_len)
152 openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
154 EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
155 vnet_crypto_op_chunk_t *chp;
156 u32 i, j, curr_len = 0;
/* Scratch output buffer for chained (multi-chunk) operations. */
157 u8 out_buf[VLIB_BUFFER_DEFAULT_DATA_SIZE * 5];
159 for (i = 0; i < n_ops; i++)
161 vnet_crypto_op_t *op = ops[i];
162 vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
/* (Re)key the shared ctx with this op's key and IV. */
165 EVP_DecryptInit_ex (ctx, cipher, NULL, key->data, op->iv);
/* Chained buffers: decrypt into out_buf, then copy back per chunk. */
167 if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
169 chp = chunks + op->chunk_index;
171 for (j = 0; j < op->n_chunks; j++)
173 EVP_DecryptUpdate (ctx, out_buf + offset, &out_len, chp->src,
179 if (out_len < curr_len)
180 EVP_DecryptFinal_ex (ctx, out_buf + offset, &out_len);
183 chp = chunks + op->chunk_index;
184 for (j = 0; j < op->n_chunks; j++)
186 clib_memcpy_fast (chp->dst, out_buf + offset, chp->len);
/* Single-buffer op: decrypt src -> dst in one shot. */
193 EVP_DecryptUpdate (ctx, op->dst, &out_len, op->src, op->len);
194 if (out_len < op->len)
195 EVP_DecryptFinal_ex (ctx, op->dst + out_len, &out_len);
197 op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
/* AEAD encrypt (GCM / chacha20-poly1305 / GMAC): process AAD, encrypt
 * payload (or only authenticate it when is_gmac), then extract the tag
 * into op->tag.  NOTE(review): extract is truncated — braces, the iv_len
 * parameter line, local i/j/len declarations and the is_gcm guard around
 * the SET_IVLEN ctrl appear to be missing; leading numbers are
 * extraction artifacts. */
202 static_always_inline u32
203 openssl_ops_enc_aead (vlib_main_t *vm, vnet_crypto_op_t *ops[],
204 vnet_crypto_op_chunk_t *chunks, u32 n_ops,
205 const EVP_CIPHER *cipher, int is_gcm, int is_gmac,
208 openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
210 EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
211 vnet_crypto_op_chunk_t *chp;
213 for (i = 0; i < n_ops; i++)
215 vnet_crypto_op_t *op = ops[i];
216 vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
/* Two-step init: select cipher first, set 12-byte IV length, then key. */
219 EVP_EncryptInit_ex (ctx, cipher, 0, 0, 0);
221 EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, NULL);
222 EVP_EncryptInit_ex (ctx, 0, 0, key->data, op->iv);
/* NULL output buffer => this update only feeds AAD into the MAC. */
224 EVP_EncryptUpdate (ctx, NULL, &len, op->aad, op->aad_len);
225 if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
227 chp = chunks + op->chunk_index;
228 for (j = 0; j < op->n_chunks; j++)
/* For GMAC, dst is NULL: authenticate the bytes without writing output. */
230 EVP_EncryptUpdate (ctx, is_gmac ? 0 : chp->dst, &len, chp->src,
236 EVP_EncryptUpdate (ctx, is_gmac ? 0 : op->dst, &len, op->src, op->len);
237 EVP_EncryptFinal_ex (ctx, is_gmac ? 0 : op->dst + len, &len);
/* Pull the computed authentication tag out of the ctx. */
238 EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_AEAD_GET_TAG, op->tag_len, op->tag);
239 op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
/* AES-GMAC encrypt: GCM with a NULL cipher — the payload is
 * authenticated (folded into the tag) but not encrypted.  Thin wrapper
 * selecting the is_gcm/is_gmac flavor of openssl_ops_enc_aead. */
static_always_inline u32
openssl_ops_enc_null_gmac (vlib_main_t *vm, vnet_crypto_op_t *ops[],
			   vnet_crypto_op_chunk_t *chunks, u32 n_ops,
			   const EVP_CIPHER *cipher, const int iv_len)
{
  return openssl_ops_enc_aead (vm, ops, chunks, n_ops, cipher,
			       /* is_gcm */ 1, /* is_gmac */ 1, iv_len);
}
/* AES-GCM encrypt: authenticated encryption with ciphertext output.
 * Thin wrapper selecting the GCM flavor of openssl_ops_enc_aead. */
static_always_inline u32
openssl_ops_enc_gcm (vlib_main_t *vm, vnet_crypto_op_t *ops[],
		     vnet_crypto_op_chunk_t *chunks, u32 n_ops,
		     const EVP_CIPHER *cipher, const int iv_len)
{
  return openssl_ops_enc_aead (vm, ops, chunks, n_ops, cipher,
			       /* is_gcm */ 1, /* is_gmac */ 0, iv_len);
}
/* chacha20-poly1305 encrypt (is_gcm = 0 skips the GCM-specific IV-length
 * ctrl).  __clib_unused: only referenced when OpenSSL >= 1.1.0 puts this
 * alg into foreach_openssl_evp_op. */
static_always_inline __clib_unused u32
openssl_ops_enc_chacha20_poly1305 (vlib_main_t *vm, vnet_crypto_op_t *ops[],
				   vnet_crypto_op_chunk_t *chunks, u32 n_ops,
				   const EVP_CIPHER *cipher, const int iv_len)
{
  return openssl_ops_enc_aead (vm, ops, chunks, n_ops, cipher,
			       /* is_gcm */ 0, /* is_gmac */ 0, iv_len);
}
/* AEAD decrypt: process AAD, decrypt (or only re-authenticate when
 * is_gmac), set the expected tag, and let EVP_DecryptFinal_ex verify it.
 * Ops failing tag verification get FAIL_BAD_HMAC; returns the number of
 * successful ops.  NOTE(review): extract is truncated — braces, the
 * iv_len parameter line, the len declaration, the n_fail++ on the
 * failure path and the is_gcm guard appear to be missing; leading
 * numbers are extraction artifacts. */
271 static_always_inline u32
272 openssl_ops_dec_aead (vlib_main_t *vm, vnet_crypto_op_t *ops[],
273 vnet_crypto_op_chunk_t *chunks, u32 n_ops,
274 const EVP_CIPHER *cipher, int is_gcm, int is_gmac,
277 openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
279 EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
280 vnet_crypto_op_chunk_t *chp;
281 u32 i, j, n_fail = 0;
282 for (i = 0; i < n_ops; i++)
284 vnet_crypto_op_t *op = ops[i];
285 vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
/* Two-step init: select cipher, set 12-byte IV length, then key + IV. */
288 EVP_DecryptInit_ex (ctx, cipher, 0, 0, 0);
290 EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, 0);
291 EVP_DecryptInit_ex (ctx, 0, 0, key->data, op->iv);
/* NULL output => AAD-only update, folded into the tag computation. */
293 EVP_DecryptUpdate (ctx, 0, &len, op->aad, op->aad_len);
294 if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
296 chp = chunks + op->chunk_index;
297 for (j = 0; j < op->n_chunks; j++)
299 EVP_DecryptUpdate (ctx, is_gmac ? 0 : chp->dst, &len, chp->src,
306 EVP_DecryptUpdate (ctx, is_gmac ? 0 : op->dst, &len, op->src,
/* Hand OpenSSL the received tag so Final can verify it. */
309 EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_AEAD_SET_TAG, op->tag_len, op->tag);
/* Final > 0 means tag verified; otherwise flag the op as auth failure. */
311 if (EVP_DecryptFinal_ex (ctx, is_gmac ? 0 : op->dst + len, &len) > 0)
312 op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
316 op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
319 return n_ops - n_fail;
/* AES-GMAC decrypt/verify: GCM with a NULL cipher — payload is only
 * re-authenticated against op->tag, never decrypted.  Thin wrapper
 * selecting the is_gcm/is_gmac flavor of openssl_ops_dec_aead. */
static_always_inline u32
openssl_ops_dec_null_gmac (vlib_main_t *vm, vnet_crypto_op_t *ops[],
			   vnet_crypto_op_chunk_t *chunks, u32 n_ops,
			   const EVP_CIPHER *cipher, const int iv_len)
{
  return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
			       /* is_gcm */ 1, /* is_gmac */ 1, iv_len);
}
/* AES-GCM decrypt with tag verification.  Thin wrapper selecting the
 * GCM flavor of openssl_ops_dec_aead. */
static_always_inline u32
openssl_ops_dec_gcm (vlib_main_t *vm, vnet_crypto_op_t *ops[],
		     vnet_crypto_op_chunk_t *chunks, u32 n_ops,
		     const EVP_CIPHER *cipher, const int iv_len)
{
  return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
			       /* is_gcm */ 1, /* is_gmac */ 0, iv_len);
}
/* chacha20-poly1305 decrypt with tag verification (is_gcm = 0 skips the
 * GCM-specific IV-length ctrl).  __clib_unused: only referenced when
 * OpenSSL >= 1.1.0 puts this alg into foreach_openssl_evp_op. */
static_always_inline __clib_unused u32
openssl_ops_dec_chacha20_poly1305 (vlib_main_t *vm, vnet_crypto_op_t *ops[],
				   vnet_crypto_op_chunk_t *chunks, u32 n_ops,
				   const EVP_CIPHER *cipher, const int iv_len)
{
  return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
			       /* is_gcm */ 0, /* is_gmac */ 0, iv_len);
}
/* Compute a plain message digest (md) over each op's data, writing the
 * result and its length into op->digest / op->digest_len.  NOTE(review):
 * extract is truncated — braces and some statements are missing; leading
 * numbers are extraction artifacts. */
349 static_always_inline u32
350 openssl_ops_hash (vlib_main_t *vm, vnet_crypto_op_t *ops[],
351 vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_MD *md)
353 openssl_per_thread_data_t *ptd =
354 vec_elt_at_index (per_thread_data, vm->thread_index);
355 EVP_MD_CTX *ctx = ptd->hash_ctx;
356 vnet_crypto_op_chunk_t *chp;
357 u32 md_len, i, j, n_fail = 0;
359 for (i = 0; i < n_ops; i++)
361 vnet_crypto_op_t *op = ops[i];
/* Reset the per-thread digest ctx for this op. */
363 EVP_DigestInit_ex (ctx, md, NULL);
/* Chained buffers: feed each chunk into the digest in order. */
364 if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
366 chp = chunks + op->chunk_index;
367 for (j = 0; j < op->n_chunks; j++)
369 EVP_DigestUpdate (ctx, chp->src, chp->len);
374 EVP_DigestUpdate (ctx, op->src, op->len);
376 EVP_DigestFinal_ex (ctx, op->digest, &md_len);
377 op->digest_len = md_len;
378 op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
380 return n_ops - n_fail;
/* Compute (or, with HMAC_CHECK set, verify) an HMAC over each op's data.
 * sz is the caller-requested digest length, possibly truncated below the
 * full EVP_MD_size.  Verification failures get FAIL_BAD_HMAC; returns
 * the number of successful ops.  NOTE(review): extract is truncated —
 * braces, the md parameter line, the local buffer[] declaration and the
 * n_fail++ on the failure path are missing; leading numbers are
 * extraction artifacts. */
383 static_always_inline u32
384 openssl_ops_hmac (vlib_main_t * vm, vnet_crypto_op_t * ops[],
385 vnet_crypto_op_chunk_t * chunks, u32 n_ops,
389 openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
391 HMAC_CTX *ctx = ptd->hmac_ctx;
392 vnet_crypto_op_chunk_t *chp;
393 u32 i, j, n_fail = 0;
394 for (i = 0; i < n_ops; i++)
396 vnet_crypto_op_t *op = ops[i];
397 vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
398 unsigned int out_len = 0;
/* Use the op's requested digest length, else the md's native size. */
399 size_t sz = op->digest_len ? op->digest_len : EVP_MD_size (md);
/* Re-key the per-thread HMAC ctx for this op. */
401 HMAC_Init_ex (ctx, key->data, vec_len (key->data), md, NULL);
402 if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
404 chp = chunks + op->chunk_index;
405 for (j = 0; j < op->n_chunks; j++)
407 HMAC_Update (ctx, chp->src, chp->len);
412 HMAC_Update (ctx, op->src, op->len);
413 HMAC_Final (ctx, buffer, &out_len);
/* Verify mode: compare computed MAC against the received digest. */
415 if (op->flags & VNET_CRYPTO_OP_FLAG_HMAC_CHECK)
417 if ((memcmp (op->digest, buffer, sz)))
420 op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
/* Generate mode: copy (possibly truncated) MAC out to the op. */
425 clib_memcpy_fast (op->digest, buffer, sz);
426 op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
428 return n_ops - n_fail;
/* Expand foreach_openssl_evp_op into four concrete handlers per alg:
 * openssl_ops_{enc,dec}_<alg> and the _chained_ variants, each calling
 * the generic openssl_ops_{enc,dec}_<m> worker with the alg's EVP cipher
 * constructor b() and IV length.  NOTE(review): extract is truncated —
 * the n_ops parameter lines and braces inside this macro are missing. */
431 #define _(m, a, b, iv) \
432 static u32 openssl_ops_enc_##a (vlib_main_t *vm, vnet_crypto_op_t *ops[], \
435 return openssl_ops_enc_##m (vm, ops, 0, n_ops, b (), iv); \
438 u32 openssl_ops_dec_##a (vlib_main_t *vm, vnet_crypto_op_t *ops[], \
441 return openssl_ops_dec_##m (vm, ops, 0, n_ops, b (), iv); \
444 static u32 openssl_ops_enc_chained_##a ( \
445 vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, \
448 return openssl_ops_enc_##m (vm, ops, chunks, n_ops, b (), iv); \
451 static u32 openssl_ops_dec_chained_##a ( \
452 vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, \
455 return openssl_ops_dec_##m (vm, ops, chunks, n_ops, b (), iv); \
458 foreach_openssl_evp_op;
/* Expand foreach_openssl_hash_op into openssl_ops_hash_<alg> and
 * _chained_ handlers delegating to the generic openssl_ops_hash worker.
 * NOTE(review): the "#define _(a, b)" header line, the #undef and some
 * interior lines are missing from this extract. */
462 static u32 openssl_ops_hash_##a (vlib_main_t *vm, vnet_crypto_op_t *ops[], \
465 return openssl_ops_hash (vm, ops, 0, n_ops, b ()); \
467 static u32 openssl_ops_hash_chained_##a ( \
468 vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, \
471 return openssl_ops_hash (vm, ops, chunks, n_ops, b ()); \
474 foreach_openssl_hash_op;
/* Expand foreach_openssl_hmac_op into openssl_ops_hmac_<alg> and
 * _chained_ handlers delegating to the generic openssl_ops_hmac worker.
 * NOTE(review): the "#define _(a, b)" header, return-type lines and the
 * #undef are missing from this extract. */
479 openssl_ops_hmac_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
480 { return openssl_ops_hmac (vm, ops, 0, n_ops, b ()); } \
482 openssl_ops_hmac_chained_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
483 vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
484 { return openssl_ops_hmac (vm, ops, chunks, n_ops, b ()); } \
486 foreach_openssl_hmac_op;
/* Plugin init: seed OpenSSL's RNG from the kernel, register this engine
 * and every generated ENC/DEC/HMAC/HASH handler pair with the vnet
 * crypto layer, then allocate per-thread cipher/HMAC/digest contexts.
 * Returns a clib error on getrandom() failure, otherwise 0 (return in
 * truncated portion).  NOTE(review): extract is truncated — the return
 * type line, the seed declaration, braces, several #define/#undef pairs
 * for the registration macros and the closing #endif/return are missing;
 * leading numbers are extraction artifacts. */
491 crypto_openssl_init (vlib_main_t * vm)
493 vlib_thread_main_t *tm = vlib_get_thread_main ();
494 openssl_per_thread_data_t *ptd;
/* Seed OpenSSL's RNG from the kernel entropy pool; fail init if the
 * kernel cannot supply a full seed. */
497 if (syscall (SYS_getrandom, &seed, sizeof (seed), 0) != sizeof (seed))
498 return clib_error_return_unix (0, "getrandom() failed");
500 RAND_seed (seed, sizeof (seed));
/* Register the engine (priority 50) and get its index for handlers. */
502 u32 eidx = vnet_crypto_register_engine (vm, "openssl", 50, "OpenSSL");
/* For each EVP alg, register simple + chained ENC and DEC handlers. */
504 #define _(m, a, b, iv) \
505 vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
506 openssl_ops_enc_##a, \
507 openssl_ops_enc_chained_##a); \
508 vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
509 openssl_ops_dec_##a, \
510 openssl_ops_dec_chained_##a);
512 foreach_openssl_evp_op;
516 vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_HMAC, \
517 openssl_ops_hmac_##a, \
518 openssl_ops_hmac_chained_##a); \
520 foreach_openssl_hmac_op;
524 vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_HASH, \
525 openssl_ops_hash_##a, \
526 openssl_ops_hash_chained_##a);
528 foreach_openssl_hash_op;
/* One context set per VPP thread (main + workers). */
531 vec_validate_aligned (per_thread_data, tm->n_vlib_mains - 1,
532 CLIB_CACHE_LINE_BYTES);
534 vec_foreach (ptd, per_thread_data)
536 ptd->evp_cipher_ctx = EVP_CIPHER_CTX_new ();
/* Padding is handled by the framework, not by OpenSSL. */
537 EVP_CIPHER_CTX_set_padding (ptd->evp_cipher_ctx, 0);
538 #if OPENSSL_VERSION_NUMBER >= 0x10100000L
539 ptd->hmac_ctx = HMAC_CTX_new ();
540 ptd->hash_ctx = EVP_MD_CTX_create ();
/* Pre-1.1.0 OpenSSL: HMAC_CTX is embedded in the per-thread struct. */
542 HMAC_CTX_init (&(ptd->_hmac_ctx));
543 ptd->hmac_ctx = &ptd->_hmac_ctx;
/* Run after the vnet crypto core so engine registration can succeed.
 * NOTE(review): the closing brace/semicolon of this registration is
 * missing from this extract. */
551 VLIB_INIT_FUNCTION (crypto_openssl_init) =
553 .runs_after = VLIB_INITS ("vnet_crypto_init"),
/* Plugin registration: advertises this shared object to VPP's plugin
 * loader.  NOTE(review): the closing "};" is missing from this
 * extract. */
559 VLIB_PLUGIN_REGISTER () = {
560 .version = VPP_BUILD_VER,
561 .description = "OpenSSL Crypto Engine",
566 * fd.io coding-style-patch-verification: ON
569 * eval: (c-set-style "gnu")