/* SPDX-License-Identifier: Apache-2.0
 * Copyright(c) 2024 Cisco Systems, Inc.
 */
6 #include <vnet/plugin/plugin.h>
7 #include <vnet/crypto/crypto.h>
8 #include <crypto_native/crypto_native.h>
9 #include <vppinfra/crypto/sha2.h>
11 static_always_inline u32
12 crypto_native_ops_hash_sha2 (vlib_main_t *vm, vnet_crypto_op_t *ops[],
13 u32 n_ops, vnet_crypto_op_chunk_t *chunks,
14 clib_sha2_type_t type, int maybe_chained)
16 vnet_crypto_op_t *op = ops[0];
21 if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
23 vnet_crypto_op_chunk_t *chp = chunks + op->chunk_index;
24 clib_sha2_init (&ctx, type);
25 for (int j = 0; j < op->n_chunks; j++, chp++)
26 clib_sha2_update (&ctx, chp->src, chp->len);
27 clib_sha2_final (&ctx, op->digest);
30 clib_sha2 (type, op->src, op->len, op->digest);
32 op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
43 static_always_inline u32
44 crypto_native_ops_hmac_sha2 (vlib_main_t *vm, vnet_crypto_op_t *ops[],
45 u32 n_ops, vnet_crypto_op_chunk_t *chunks,
46 clib_sha2_type_t type)
48 crypto_native_main_t *cm = &crypto_native_main;
49 vnet_crypto_op_t *op = ops[0];
51 clib_sha2_hmac_ctx_t ctx;
55 for (; n_left; n_left--, op++)
58 &ctx, type, (clib_sha2_hmac_key_data_t *) cm->key_data[op->key_index]);
59 if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
61 vnet_crypto_op_chunk_t *chp = chunks + op->chunk_index;
62 for (int j = 0; j < op->n_chunks; j++, chp++)
63 clib_sha2_hmac_update (&ctx, chp->src, chp->len);
66 clib_sha2_hmac_update (&ctx, op->src, op->len);
68 clib_sha2_hmac_final (&ctx, buffer);
73 if (op->flags & VNET_CRYPTO_OP_FLAG_HMAC_CHECK)
75 if ((memcmp (op->digest, buffer, sz)))
78 op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
83 clib_memcpy_fast (op->digest, buffer, sz);
87 sz = clib_sha2_variants[type].digest_size;
88 if (op->flags & VNET_CRYPTO_OP_FLAG_HMAC_CHECK)
90 if ((memcmp (op->digest, buffer, sz)))
93 op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
98 clib_memcpy_fast (op->digest, buffer, sz);
101 op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
104 return n_ops - n_fail;
108 sha2_key_add (vnet_crypto_key_t *key, clib_sha2_type_t type)
110 clib_sha2_hmac_key_data_t *kd;
112 kd = clib_mem_alloc_aligned (sizeof (*kd), CLIB_CACHE_LINE_BYTES);
113 clib_sha2_hmac_key_data (type, key->data, vec_len (key->data), kd);
/**
 * Engine priority probe: report a positive priority only when the CPU
 * actually provides SHA instructions, otherwise -1 to disable this engine.
 * (Priority values reconstructed from upstream; x86 SHA-NI is preferred
 * over the Arm SHA2 extension — confirm against the repo.)
 */
static int
probe ()
{
#if defined(__SHA__) && defined(__x86_64__)
  if (clib_cpu_supports_sha ())
    return 50;
#elif defined(__ARM_FEATURE_SHA2)
  if (clib_cpu_supports_sha2 ())
    return 10;
#endif
  return -1;
}
132 static u32 crypto_native_ops_hash_sha##b ( \
133 vlib_main_t *vm, vnet_crypto_op_t *ops[], u32 n_ops) \
135 return crypto_native_ops_hash_sha2 (vm, ops, n_ops, 0, CLIB_SHA2_##b, 0); \
138 static u32 crypto_native_ops_chained_hash_sha##b ( \
139 vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, \
142 return crypto_native_ops_hash_sha2 (vm, ops, n_ops, chunks, \
146 static u32 crypto_native_ops_hmac_sha##b ( \
147 vlib_main_t *vm, vnet_crypto_op_t *ops[], u32 n_ops) \
149 return crypto_native_ops_hmac_sha2 (vm, ops, n_ops, 0, CLIB_SHA2_##b); \
152 static u32 crypto_native_ops_chained_hmac_sha##b ( \
153 vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, \
156 return crypto_native_ops_hmac_sha2 (vm, ops, n_ops, chunks, \
160 static void *sha2_##b##_key_add (vnet_crypto_key_t *k) \
162 return sha2_key_add (k, CLIB_SHA2_##b); \
165 CRYPTO_NATIVE_OP_HANDLER (crypto_native_hash_sha##b) = { \
166 .op_id = VNET_CRYPTO_OP_SHA##b##_HASH, \
167 .fn = crypto_native_ops_hash_sha##b, \
168 .cfn = crypto_native_ops_chained_hash_sha##b, \
171 CRYPTO_NATIVE_OP_HANDLER (crypto_native_hmac_sha##b) = { \
172 .op_id = VNET_CRYPTO_OP_SHA##b##_HMAC, \
173 .fn = crypto_native_ops_hmac_sha##b, \
174 .cfn = crypto_native_ops_chained_hmac_sha##b, \
177 CRYPTO_NATIVE_KEY_HANDLER (crypto_native_hmac_sha##b) = { \
178 .alg_id = VNET_CRYPTO_ALG_HMAC_SHA##b, \
179 .key_fn = sha2_##b##_key_add, \