/*
 *------------------------------------------------------------------
 * Copyright (c) 2019 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *------------------------------------------------------------------
 */

#include <sys/random.h>

#include <openssl/evp.h>
#include <openssl/hmac.h>
#include <openssl/rand.h>
#include <openssl/sha.h>

#include <vlib/vlib.h>
#include <vnet/plugin/plugin.h>
#include <vnet/crypto/crypto.h>
#include <vpp/app/version.h>

/* Per-thread OpenSSL context state, cache-line aligned to avoid false
 * sharing between workers. */
typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
  EVP_CIPHER_CTX *evp_cipher_ctx;
  HMAC_CTX *hmac_ctx;
  EVP_MD_CTX *hash_ctx;
#if OPENSSL_VERSION_NUMBER < 0x10100000L
  HMAC_CTX _hmac_ctx;
#endif
} openssl_per_thread_data_t;

static openssl_per_thread_data_t *per_thread_data = 0;
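
/* Cipher dispatch table: each entry is (mode handler, VPP algorithm id,
 * OpenSSL EVP cipher constructor, IV length in bytes). The CTR entries
 * reuse the cbc handler, since both are driven by the same
 * EVP_EncryptUpdate () call pattern. */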
#define foreach_openssl_aes_evp_op \
  _ (cbc, DES_CBC, EVP_des_cbc, 8) \
  _ (cbc, 3DES_CBC, EVP_des_ede3_cbc, 8) \
  _ (cbc, AES_128_CBC, EVP_aes_128_cbc, 16) \
  _ (cbc, AES_192_CBC, EVP_aes_192_cbc, 16) \
  _ (cbc, AES_256_CBC, EVP_aes_256_cbc, 16) \
  _ (gcm, AES_128_GCM, EVP_aes_128_gcm, 8) \
  _ (gcm, AES_192_GCM, EVP_aes_192_gcm, 8) \
  _ (gcm, AES_256_GCM, EVP_aes_256_gcm, 8) \
  _ (cbc, AES_128_CTR, EVP_aes_128_ctr, 8) \
  _ (cbc, AES_192_CTR, EVP_aes_192_ctr, 8) \
  _ (cbc, AES_256_CTR, EVP_aes_256_ctr, 8)

#define foreach_openssl_chacha20_evp_op \
  _ (chacha20_poly1305, CHACHA20_POLY1305, EVP_chacha20_poly1305, 8)

#if OPENSSL_VERSION_NUMBER >= 0x10100000L
#define foreach_openssl_evp_op foreach_openssl_aes_evp_op \
  foreach_openssl_chacha20_evp_op
#else
#define foreach_openssl_evp_op foreach_openssl_aes_evp_op
#endif

/* Map the generic AEAD tag controls onto their GCM-specific names on
 * OpenSSL versions that predate them. */
#ifndef EVP_CTRL_AEAD_GET_TAG
#define EVP_CTRL_AEAD_GET_TAG EVP_CTRL_GCM_GET_TAG
#endif

#ifndef EVP_CTRL_AEAD_SET_TAG
#define EVP_CTRL_AEAD_SET_TAG EVP_CTRL_GCM_SET_TAG
#endif

#define foreach_openssl_hash_op \
  _ (SHA1, EVP_sha1) \
  _ (SHA224, EVP_sha224) \
  _ (SHA256, EVP_sha256) \
  _ (SHA384, EVP_sha384) \
  _ (SHA512, EVP_sha512)

#define foreach_openssl_hmac_op \
  _(MD5, EVP_md5) \
  _(SHA1, EVP_sha1) \
  _(SHA224, EVP_sha224) \
  _(SHA256, EVP_sha256) \
  _(SHA384, EVP_sha384) \
  _(SHA512, EVP_sha512)
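
/* Batch encrypt with a CBC/CTR-style EVP cipher. For chained-buffer ops the
 * ciphertext is staged in a contiguous stack buffer and copied back per
 * chunk, because block-cipher output can lag chunk boundaries by up to one
 * block. */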
static_always_inline u32
openssl_ops_enc_cbc (vlib_main_t *vm, vnet_crypto_op_t *ops[],
                     vnet_crypto_op_chunk_t *chunks, u32 n_ops,
                     const EVP_CIPHER *cipher, const int iv_len)
{
  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
                                                     vm->thread_index);
  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
  vnet_crypto_op_chunk_t *chp;
  u32 i, j, curr_len = 0;
  u8 out_buf[VLIB_BUFFER_DEFAULT_DATA_SIZE * 5];

  for (i = 0; i < n_ops; i++)
    {
      vnet_crypto_op_t *op = ops[i];
      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
      int out_len = 0;

      if (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV)
        RAND_bytes (op->iv, iv_len);

      EVP_EncryptInit_ex (ctx, cipher, NULL, key->data, op->iv);

      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        EVP_CIPHER_CTX_set_padding (ctx, 0);

      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        {
          u32 offset = 0;
          chp = chunks + op->chunk_index;
          for (j = 0; j < op->n_chunks; j++)
            {
              EVP_EncryptUpdate (ctx, out_buf + offset, &out_len, chp->src,
                                 chp->len);
              curr_len = chp->len;
              offset += out_len;
              chp += 1;
            }
          if (out_len < curr_len)
            EVP_EncryptFinal_ex (ctx, out_buf + offset, &out_len);

          offset = 0;
          chp = chunks + op->chunk_index;
          for (j = 0; j < op->n_chunks; j++)
            {
              clib_memcpy_fast (chp->dst, out_buf + offset, chp->len);
              offset += chp->len;
              chp += 1;
            }
        }
      else
        {
          EVP_EncryptUpdate (ctx, op->dst, &out_len, op->src, op->len);
          if (out_len < op->len)
            EVP_EncryptFinal_ex (ctx, op->dst + out_len, &out_len);
        }
      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
    }
  return n_ops;
}
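
/* Batch decrypt counterpart: same chunk-staging scheme, but the IV always
 * arrives with the op, so there is no INIT_IV handling here. */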
static_always_inline u32
openssl_ops_dec_cbc (vlib_main_t *vm, vnet_crypto_op_t *ops[],
                     vnet_crypto_op_chunk_t *chunks, u32 n_ops,
                     const EVP_CIPHER *cipher, const int iv_len)
{
  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
                                                     vm->thread_index);
  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
  vnet_crypto_op_chunk_t *chp;
  u32 i, j, curr_len = 0;
  u8 out_buf[VLIB_BUFFER_DEFAULT_DATA_SIZE * 5];

  for (i = 0; i < n_ops; i++)
    {
      vnet_crypto_op_t *op = ops[i];
      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
      int out_len = 0;

      EVP_DecryptInit_ex (ctx, cipher, NULL, key->data, op->iv);

      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        EVP_CIPHER_CTX_set_padding (ctx, 0);

      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        {
          u32 offset = 0;
          chp = chunks + op->chunk_index;
          for (j = 0; j < op->n_chunks; j++)
            {
              EVP_DecryptUpdate (ctx, out_buf + offset, &out_len, chp->src,
                                 chp->len);
              curr_len = chp->len;
              offset += out_len;
              chp += 1;
            }
          if (out_len < curr_len)
            EVP_DecryptFinal_ex (ctx, out_buf + offset, &out_len);
          offset = 0;
          chp = chunks + op->chunk_index;
          for (j = 0; j < op->n_chunks; j++)
            {
              clib_memcpy_fast (chp->dst, out_buf + offset, chp->len);
              offset += chp->len;
              chp += 1;
            }
        }
      else
        {
          EVP_DecryptUpdate (ctx, op->dst, &out_len, op->src, op->len);
          if (out_len < op->len)
            EVP_DecryptFinal_ex (ctx, op->dst + out_len, &out_len);
        }
      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
    }
  return n_ops;
}
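
/* Batch AEAD encrypt (GCM or chacha20-poly1305). For GCM the IV length is
 * forced to 12 bytes; only the trailing 8 bytes are regenerated on INIT_IV,
 * the leading 4 are assumed to be caller-provided (e.g. an ESP-style salt).
 * The authentication tag is collected after EVP_EncryptFinal_ex (). */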
static_always_inline u32
openssl_ops_enc_aead (vlib_main_t *vm, vnet_crypto_op_t *ops[],
                      vnet_crypto_op_chunk_t *chunks, u32 n_ops,
                      const EVP_CIPHER *cipher, int is_gcm, const int iv_len)
{
  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
                                                     vm->thread_index);
  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
  vnet_crypto_op_chunk_t *chp;
  u32 i, j;
  for (i = 0; i < n_ops; i++)
    {
      vnet_crypto_op_t *op = ops[i];
      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
      int len = 0;

      if (op->flags & VNET_CRYPTO_OP_FLAG_INIT_IV)
        RAND_bytes (op->iv, 8);

      EVP_EncryptInit_ex (ctx, cipher, 0, 0, 0);
      if (is_gcm)
        EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, NULL);
      EVP_EncryptInit_ex (ctx, 0, 0, key->data, op->iv);
      if (op->aad_len)
        EVP_EncryptUpdate (ctx, NULL, &len, op->aad, op->aad_len);
      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        {
          chp = chunks + op->chunk_index;
          for (j = 0; j < op->n_chunks; j++)
            {
              EVP_EncryptUpdate (ctx, chp->dst, &len, chp->src, chp->len);
              chp += 1;
            }
        }
      else
        EVP_EncryptUpdate (ctx, op->dst, &len, op->src, op->len);
      EVP_EncryptFinal_ex (ctx, op->dst + len, &len);
      EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_AEAD_GET_TAG, op->tag_len, op->tag);
      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
    }
  return n_ops;
}

static_always_inline u32
openssl_ops_enc_gcm (vlib_main_t *vm, vnet_crypto_op_t *ops[],
                     vnet_crypto_op_chunk_t *chunks, u32 n_ops,
                     const EVP_CIPHER *cipher, const int iv_len)
{
  return openssl_ops_enc_aead (vm, ops, chunks, n_ops, cipher,
                               /* is_gcm */ 1, iv_len);
}

static_always_inline __clib_unused u32
openssl_ops_enc_chacha20_poly1305 (vlib_main_t *vm, vnet_crypto_op_t *ops[],
                                   vnet_crypto_op_chunk_t *chunks, u32 n_ops,
                                   const EVP_CIPHER *cipher, const int iv_len)
{
  return openssl_ops_enc_aead (vm, ops, chunks, n_ops, cipher,
                               /* is_gcm */ 0, iv_len);
}
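
/* Batch AEAD decrypt: the expected tag is handed to OpenSSL via
 * EVP_CTRL_AEAD_SET_TAG and verified inside EVP_DecryptFinal_ex ();
 * authentication failures are counted and reflected in the return value. */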
static_always_inline u32
openssl_ops_dec_aead (vlib_main_t *vm, vnet_crypto_op_t *ops[],
                      vnet_crypto_op_chunk_t *chunks, u32 n_ops,
                      const EVP_CIPHER *cipher, int is_gcm, const int iv_len)
{
  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
                                                     vm->thread_index);
  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
  vnet_crypto_op_chunk_t *chp;
  u32 i, j, n_fail = 0;
  for (i = 0; i < n_ops; i++)
    {
      vnet_crypto_op_t *op = ops[i];
      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
      int len = 0;

      EVP_DecryptInit_ex (ctx, cipher, 0, 0, 0);
      if (is_gcm)
        EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, 0);
      EVP_DecryptInit_ex (ctx, 0, 0, key->data, op->iv);
      if (op->aad_len)
        EVP_DecryptUpdate (ctx, 0, &len, op->aad, op->aad_len);
      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        {
          chp = chunks + op->chunk_index;
          for (j = 0; j < op->n_chunks; j++)
            {
              EVP_DecryptUpdate (ctx, chp->dst, &len, chp->src, chp->len);
              chp += 1;
            }
        }
      else
        EVP_DecryptUpdate (ctx, op->dst, &len, op->src, op->len);
      EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_AEAD_SET_TAG, op->tag_len, op->tag);

      if (EVP_DecryptFinal_ex (ctx, op->dst + len, &len) > 0)
        op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
      else
        {
          n_fail++;
          op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
        }
    }
  return n_ops - n_fail;
}

static_always_inline u32
openssl_ops_dec_gcm (vlib_main_t *vm, vnet_crypto_op_t *ops[],
                     vnet_crypto_op_chunk_t *chunks, u32 n_ops,
                     const EVP_CIPHER *cipher, const int iv_len)
{
  return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
                               /* is_gcm */ 1, iv_len);
}

static_always_inline __clib_unused u32
openssl_ops_dec_chacha20_poly1305 (vlib_main_t *vm, vnet_crypto_op_t *ops[],
                                   vnet_crypto_op_chunk_t *chunks, u32 n_ops,
                                   const EVP_CIPHER *cipher, const int iv_len)
{
  return openssl_ops_dec_aead (vm, ops, chunks, n_ops, cipher,
                               /* is_gcm */ 0, iv_len);
}
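
/* Plain digest over a single buffer or a chain of chunks. */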
static_always_inline u32
openssl_ops_hash (vlib_main_t *vm, vnet_crypto_op_t *ops[],
                  vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_MD *md)
{
  openssl_per_thread_data_t *ptd =
    vec_elt_at_index (per_thread_data, vm->thread_index);
  EVP_MD_CTX *ctx = ptd->hash_ctx;
  vnet_crypto_op_chunk_t *chp;
  u32 md_len, i, j, n_fail = 0;

  for (i = 0; i < n_ops; i++)
    {
      vnet_crypto_op_t *op = ops[i];

      EVP_DigestInit_ex (ctx, md, NULL);
      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        {
          chp = chunks + op->chunk_index;
          for (j = 0; j < op->n_chunks; j++)
            {
              EVP_DigestUpdate (ctx, chp->src, chp->len);
              chp += 1;
            }
        }
      else
        EVP_DigestUpdate (ctx, op->src, op->len);

      EVP_DigestFinal_ex (ctx, op->digest, &md_len);
      op->digest_len = md_len;
      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
    }
  return n_ops - n_fail;
}
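
/* HMAC over a single buffer or a chain of chunks. With HMAC_CHECK set the
 * computed digest is compared against op->digest (truncated to
 * op->digest_len when non-zero) instead of being written out. */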
static_always_inline u32
openssl_ops_hmac (vlib_main_t * vm, vnet_crypto_op_t * ops[],
                  vnet_crypto_op_chunk_t * chunks, u32 n_ops,
                  const EVP_MD * md)
{
  u8 buffer[64];
  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
                                                     vm->thread_index);
  HMAC_CTX *ctx = ptd->hmac_ctx;
  vnet_crypto_op_chunk_t *chp;
  u32 i, j, n_fail = 0;
  for (i = 0; i < n_ops; i++)
    {
      vnet_crypto_op_t *op = ops[i];
      vnet_crypto_key_t *key = vnet_crypto_get_key (op->key_index);
      unsigned int out_len = 0;
      size_t sz = op->digest_len ? op->digest_len : EVP_MD_size (md);

      HMAC_Init_ex (ctx, key->data, vec_len (key->data), md, NULL);
      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        {
          chp = chunks + op->chunk_index;
          for (j = 0; j < op->n_chunks; j++)
            {
              HMAC_Update (ctx, chp->src, chp->len);
              chp += 1;
            }
        }
      else
        HMAC_Update (ctx, op->src, op->len);
      HMAC_Final (ctx, buffer, &out_len);

      if (op->flags & VNET_CRYPTO_OP_FLAG_HMAC_CHECK)
        {
          if ((memcmp (op->digest, buffer, sz)))
            {
              n_fail++;
              op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
              continue;
            }
        }
      else
        clib_memcpy_fast (op->digest, buffer, sz);
      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
    }
  return n_ops - n_fail;
}
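
/* Instantiate fixed-signature wrappers (plain and chained variants) for
 * every entry in the dispatch tables above; these match the handler
 * prototypes that vnet_crypto_register_ops_handlers () expects. */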
#define _(m, a, b, iv) \
  static u32 openssl_ops_enc_##a (vlib_main_t *vm, vnet_crypto_op_t *ops[], \
                                  u32 n_ops) \
  { return openssl_ops_enc_##m (vm, ops, 0, n_ops, b (), iv); } \
  u32 openssl_ops_dec_##a (vlib_main_t *vm, vnet_crypto_op_t *ops[], \
                           u32 n_ops) \
  { return openssl_ops_dec_##m (vm, ops, 0, n_ops, b (), iv); } \
  static u32 openssl_ops_enc_chained_##a ( \
    vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, \
    u32 n_ops) \
  { return openssl_ops_enc_##m (vm, ops, chunks, n_ops, b (), iv); } \
  static u32 openssl_ops_dec_chained_##a ( \
    vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, \
    u32 n_ops) \
  { return openssl_ops_dec_##m (vm, ops, chunks, n_ops, b (), iv); }

foreach_openssl_evp_op;
#undef _

#define _(a, b) \
  static u32 openssl_ops_hash_##a (vlib_main_t *vm, vnet_crypto_op_t *ops[], \
                                   u32 n_ops) \
  { return openssl_ops_hash (vm, ops, 0, n_ops, b ()); } \
  static u32 openssl_ops_hash_chained_##a ( \
    vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, \
    u32 n_ops) \
  { return openssl_ops_hash (vm, ops, chunks, n_ops, b ()); }

foreach_openssl_hash_op;
#undef _

#define _(a, b) \
static u32 \
openssl_ops_hmac_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
{ return openssl_ops_hmac (vm, ops, 0, n_ops, b ()); } \
static u32 \
openssl_ops_hmac_chained_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
                              vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
{ return openssl_ops_hmac (vm, ops, chunks, n_ops, b ()); }

foreach_openssl_hmac_op;
#undef _
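
/* Engine init: seed OpenSSL's RNG from getrandom(2), register the engine
 * and all of its op handlers, and allocate per-thread OpenSSL contexts. */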
clib_error_t *
crypto_openssl_init (vlib_main_t * vm)
{
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  openssl_per_thread_data_t *ptd;
  u8 seed[32];

  if (getrandom (&seed, sizeof (seed), 0) != sizeof (seed))
    return clib_error_return_unix (0, "getrandom() failed");

  RAND_seed (seed, sizeof (seed));

  u32 eidx = vnet_crypto_register_engine (vm, "openssl", 50, "OpenSSL");

#define _(m, a, b, iv) \
  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
                                     openssl_ops_enc_##a, \
                                     openssl_ops_enc_chained_##a); \
  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
                                     openssl_ops_dec_##a, \
                                     openssl_ops_dec_chained_##a);

  foreach_openssl_evp_op;
#undef _

#define _(a, b) \
  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_HMAC, \
                                     openssl_ops_hmac_##a, \
                                     openssl_ops_hmac_chained_##a);

  foreach_openssl_hmac_op;
#undef _

#define _(a, b) \
  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_HASH, \
                                     openssl_ops_hash_##a, \
                                     openssl_ops_hash_chained_##a);

  foreach_openssl_hash_op;
#undef _

  vec_validate_aligned (per_thread_data, tm->n_vlib_mains - 1,
                        CLIB_CACHE_LINE_BYTES);

  vec_foreach (ptd, per_thread_data)
    {
      ptd->evp_cipher_ctx = EVP_CIPHER_CTX_new ();
#if OPENSSL_VERSION_NUMBER >= 0x10100000L
      ptd->hmac_ctx = HMAC_CTX_new ();
      ptd->hash_ctx = EVP_MD_CTX_create ();
#else
      HMAC_CTX_init (&(ptd->_hmac_ctx));
      ptd->hmac_ctx = &ptd->_hmac_ctx;
#endif
    }

  return 0;
}

VLIB_INIT_FUNCTION (crypto_openssl_init) =
{
  .runs_after = VLIB_INITS ("vnet_crypto_init"),
};

VLIB_PLUGIN_REGISTER () = {
  .version = VPP_BUILD_VER,
  .description = "OpenSSL Crypto Engine",
};

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */