crypto-native: add SHA2-HMAC
vpp.git: src/plugins/crypto_native/sha2.c
/* SPDX-License-Identifier: Apache-2.0
 * Copyright(c) 2024 Cisco Systems, Inc.
 */

#include <vlib/vlib.h>
#include <vnet/plugin/plugin.h>
#include <vnet/crypto/crypto.h>
#include <crypto_native/crypto_native.h>
#include <vppinfra/crypto/sha2.h>

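/* Plain SHA-2 hashing over a batch of ops: chained-buffer ops are fed chunk
 * by chunk through an incremental context, single-buffer ops go through the
 * one-shot clib_sha2 (). */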
static_always_inline u32
crypto_native_ops_hash_sha2 (vlib_main_t *vm, vnet_crypto_op_t *ops[],
                             u32 n_ops, vnet_crypto_op_chunk_t *chunks,
                             clib_sha2_type_t type, int maybe_chained)
{
  vnet_crypto_op_t *op = ops[0];
  clib_sha2_ctx_t ctx;
  u32 n_left = n_ops;

next:
  if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
    {
      vnet_crypto_op_chunk_t *chp = chunks + op->chunk_index;
      clib_sha2_init (&ctx, type);
      for (int j = 0; j < op->n_chunks; j++, chp++)
        clib_sha2_update (&ctx, chp->src, chp->len);
      clib_sha2_final (&ctx, op->digest);
    }
  else
    clib_sha2 (type, op->src, op->len, op->digest);

  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;

  if (--n_left)
    {
      op += 1;
      goto next;
    }

  return n_ops;
}

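/* SHA-2 HMAC over a batch of ops, using the per-key state prepared by
 * sha2_key_add () below; returns the number of ops that completed without a
 * failed HMAC check. */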
static_always_inline u32
crypto_native_ops_hmac_sha2 (vlib_main_t *vm, vnet_crypto_op_t *ops[],
                             u32 n_ops, vnet_crypto_op_chunk_t *chunks,
                             clib_sha2_type_t type)
{
  crypto_native_main_t *cm = &crypto_native_main;
  vnet_crypto_op_t *op = ops[0];
  u32 n_left = n_ops;
  clib_sha2_hmac_ctx_t ctx;
  u8 buffer[64];
  u32 sz, n_fail = 0;

  for (; n_left; n_left--, op++)
    {
      clib_sha2_hmac_init (
        &ctx, type, (clib_sha2_hmac_key_data_t *) cm->key_data[op->key_index]);
      if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
        {
          vnet_crypto_op_chunk_t *chp = chunks + op->chunk_index;
          for (int j = 0; j < op->n_chunks; j++, chp++)
            clib_sha2_hmac_update (&ctx, chp->src, chp->len);
        }
      else
        clib_sha2_hmac_update (&ctx, op->src, op->len);

      clib_sha2_hmac_final (&ctx, buffer);

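      /* A non-zero op->digest_len selects a truncated tag, otherwise the
       * variant's full digest size is used; HMAC_CHECK ops compare the
       * computed tag against op->digest, all other ops copy it out. */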
      if (op->digest_len)
        {
          sz = op->digest_len;
          if (op->flags & VNET_CRYPTO_OP_FLAG_HMAC_CHECK)
            {
              if ((memcmp (op->digest, buffer, sz)))
                {
                  n_fail++;
                  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
                  continue;
                }
            }
          else
            clib_memcpy_fast (op->digest, buffer, sz);
        }
      else
        {
          sz = clib_sha2_variants[type].digest_size;
          if (op->flags & VNET_CRYPTO_OP_FLAG_HMAC_CHECK)
            {
              if ((memcmp (op->digest, buffer, sz)))
                {
                  n_fail++;
                  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
                  continue;
                }
            }
          else
            clib_memcpy_fast (op->digest, buffer, sz);
        }

      op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
    }

  return n_ops - n_fail;
}

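/* Expand a VPP crypto key into the precomputed clib_sha2_hmac_key_data_t
 * that clib_sha2_hmac_init () consumes above. */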
static void *
sha2_key_add (vnet_crypto_key_t *key, clib_sha2_type_t type)
{
  clib_sha2_hmac_key_data_t *kd;

  kd = clib_mem_alloc_aligned (sizeof (*kd), CLIB_CACHE_LINE_BYTES);
  clib_sha2_hmac_key_data (type, key->data, vec_len (key->data), kd);

  return kd;
}

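/* Probe used by the op and key handlers below: report a priority of 50 for
 * the x86-64 SHA-NI path, 10 for the Arm SHA2 path, and -1 when the running
 * CPU cannot use this backend. */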
static int
probe ()
{
#if defined(__SHA__) && defined(__x86_64__)
  if (clib_cpu_supports_sha ())
    return 50;
#elif defined(__ARM_FEATURE_SHA2)
  if (clib_cpu_supports_sha2 ())
    return 10;
#endif
  return -1;
}

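/* For each SHA-2 width, emit single-buffer and chained-buffer wrappers for
 * plain hash and HMAC ops plus a key-add wrapper, and register them as
 * crypto-native op and key handlers. */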
#define _(b)                                                                  \
  static u32 crypto_native_ops_hash_sha##b (                                  \
    vlib_main_t *vm, vnet_crypto_op_t *ops[], u32 n_ops)                      \
  {                                                                           \
    return crypto_native_ops_hash_sha2 (vm, ops, n_ops, 0, CLIB_SHA2_##b, 0); \
  }                                                                           \
                                                                              \
  static u32 crypto_native_ops_chained_hash_sha##b (                          \
    vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, \
    u32 n_ops)                                                                \
  {                                                                           \
    return crypto_native_ops_hash_sha2 (vm, ops, n_ops, chunks,               \
                                        CLIB_SHA2_##b, 1);                    \
  }                                                                           \
                                                                              \
  static u32 crypto_native_ops_hmac_sha##b (                                  \
    vlib_main_t *vm, vnet_crypto_op_t *ops[], u32 n_ops)                      \
  {                                                                           \
    return crypto_native_ops_hmac_sha2 (vm, ops, n_ops, 0, CLIB_SHA2_##b);    \
  }                                                                           \
                                                                              \
  static u32 crypto_native_ops_chained_hmac_sha##b (                          \
    vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, \
    u32 n_ops)                                                                \
  {                                                                           \
    return crypto_native_ops_hmac_sha2 (vm, ops, n_ops, chunks,               \
                                        CLIB_SHA2_##b);                       \
  }                                                                           \
                                                                              \
  static void *sha2_##b##_key_add (vnet_crypto_key_t *k)                      \
  {                                                                           \
    return sha2_key_add (k, CLIB_SHA2_##b);                                   \
  }                                                                           \
                                                                              \
  CRYPTO_NATIVE_OP_HANDLER (crypto_native_hash_sha##b) = {                    \
    .op_id = VNET_CRYPTO_OP_SHA##b##_HASH,                                    \
    .fn = crypto_native_ops_hash_sha##b,                                      \
    .cfn = crypto_native_ops_chained_hash_sha##b,                             \
    .probe = probe,                                                           \
  };                                                                          \
  CRYPTO_NATIVE_OP_HANDLER (crypto_native_hmac_sha##b) = {                    \
    .op_id = VNET_CRYPTO_OP_SHA##b##_HMAC,                                    \
    .fn = crypto_native_ops_hmac_sha##b,                                      \
    .cfn = crypto_native_ops_chained_hmac_sha##b,                             \
    .probe = probe,                                                           \
  };                                                                          \
  CRYPTO_NATIVE_KEY_HANDLER (crypto_native_hmac_sha##b) = {                   \
    .alg_id = VNET_CRYPTO_ALG_HMAC_SHA##b,                                    \
    .key_fn = sha2_##b##_key_add,                                             \
    .probe = probe,                                                           \
  };

_ (224)
_ (256)

#undef _