/*
 *------------------------------------------------------------------
 * Copyright (c) 2019 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *------------------------------------------------------------------
 */
#include <vlib/vlib.h>
#include <vnet/plugin/plugin.h>
#include <vnet/crypto/crypto.h>
#include <crypto_native/crypto_native.h>
#include <vppinfra/crypto/aes_cbc.h>
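/* AES-CBC ops handlers for the crypto_native engine, built on the shared
 * AES primitives from <vppinfra/crypto/aes_cbc.h>.  Encrypt and decrypt
 * handlers plus key-expansion functions are registered with the vnet
 * crypto infrastructure in the init function at the bottom of this file. */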
#if __GNUC__ > 4 && !__clang__ && CLIB_DEBUG == 0
#pragma GCC optimize ("O3")
#endif

#if defined(__VAES__) && defined(__AVX512F__)
#define u8xN		  u8x64
#define u32xN		  u32x16
#define u32xN_min_scalar  u32x16_min_scalar
#define u32xN_is_all_zero u32x16_is_all_zero
#define u32xN_splat	  u32x16_splat
#elif defined(__VAES__)
#define u8xN		  u8x32
#define u32xN		  u32x8
#define u32xN_min_scalar  u32x8_min_scalar
#define u32xN_is_all_zero u32x8_is_all_zero
#define u32xN_splat	  u32x8_splat
#else
#define u8xN		  u8x16
#define u32xN		  u32x4
#define u32xN_min_scalar  u32x4_min_scalar
#define u32xN_is_all_zero u32x4_is_all_zero
#define u32xN_splat	  u32x4_splat
#endif
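/* Multi-buffer CBC encryption: CBC chains every block to the previous
 * ciphertext block, so a single buffer cannot be vectorized.  Instead the
 * code below interleaves several independent operations: the four vector
 * registers r[0..3] each carry one AES block per 128-bit lane, giving 16
 * parallel streams with VAES+AVX512, 8 with VAES+AVX2 and 4 otherwise
 * (the u8xN/u32xN widths selected above). */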
static_always_inline u32
aes_ops_enc_aes_cbc (vlib_main_t * vm, vnet_crypto_op_t * ops[],
		     u32 n_ops, aes_key_size_t ks)
{
  crypto_native_main_t *cm = &crypto_native_main;
  int rounds = AES_KEY_ROUNDS (ks);
  u8 placeholder[8192];
  u32 i, j, count, n_left = n_ops;
  u32xN placeholder_mask = { };
  u32xN len = { };
  vnet_crypto_key_index_t key_index[N_AES_BYTES];
  u8 *src[N_AES_BYTES] = {};
  u8 *dst[N_AES_BYTES] = {};
  u8xN r[4] = {};
  u8xN k[15][4] = {};

  for (i = 0; i < N_AES_BYTES; i++)
    key_index[i] = ~0;

more:
  /* (re)fill lanes that ran out of data with the next pending ops */
  for (i = 0; i < N_AES_BYTES; i++)
    if (len[i] == 0)
      {
	if (n_left == 0)
	  {
	    /* no more work to enqueue, so enqueue the placeholder buffer */
	    src[i] = dst[i] = placeholder;
	    len[i] = sizeof (placeholder);
	    placeholder_mask[i] = 0;
	  }
	else
	  {
	    u8x16 t = aes_block_load (ops[0]->iv);
	    ((u8x16 *) r)[i] = t;

	    src[i] = ops[0]->src;
	    dst[i] = ops[0]->dst;
	    len[i] = ops[0]->len;
	    placeholder_mask[i] = ~0;
	    if (key_index[i] != ops[0]->key_index)
	      {
		aes_cbc_key_data_t *kd;
		key_index[i] = ops[0]->key_index;
		kd = (aes_cbc_key_data_t *) cm->key_data[key_index[i]];
		for (j = 0; j < rounds + 1; j++)
		  ((u8x16 *) k[j])[i] = kd->encrypt_key[j];
	      }
	    ops[0]->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
	    n_left--;
	    ops++;
	  }
      }

  /* process only as many bytes as the shortest lane allows */
  count = u32xN_min_scalar (len);

  ASSERT (count % 16 == 0);

  for (i = 0; i < count; i += 16)
    {
#if defined(__VAES__) && defined(__AVX512F__)
      r[0] = u8x64_xor3 (r[0], aes_block_load_x4 (src, i), k[0][0]);
      r[1] = u8x64_xor3 (r[1], aes_block_load_x4 (src + 4, i), k[0][1]);
      r[2] = u8x64_xor3 (r[2], aes_block_load_x4 (src + 8, i), k[0][2]);
      r[3] = u8x64_xor3 (r[3], aes_block_load_x4 (src + 12, i), k[0][3]);

      for (j = 1; j < rounds; j++)
	{
	  r[0] = aes_enc_round_x4 (r[0], k[j][0]);
	  r[1] = aes_enc_round_x4 (r[1], k[j][1]);
	  r[2] = aes_enc_round_x4 (r[2], k[j][2]);
	  r[3] = aes_enc_round_x4 (r[3], k[j][3]);
	}
      r[0] = aes_enc_last_round_x4 (r[0], k[j][0]);
      r[1] = aes_enc_last_round_x4 (r[1], k[j][1]);
      r[2] = aes_enc_last_round_x4 (r[2], k[j][2]);
      r[3] = aes_enc_last_round_x4 (r[3], k[j][3]);

      aes_block_store_x4 (dst, i, r[0]);
      aes_block_store_x4 (dst + 4, i, r[1]);
      aes_block_store_x4 (dst + 8, i, r[2]);
      aes_block_store_x4 (dst + 12, i, r[3]);
#elif defined(__VAES__)
      r[0] = u8x32_xor3 (r[0], aes_block_load_x2 (src, i), k[0][0]);
      r[1] = u8x32_xor3 (r[1], aes_block_load_x2 (src + 2, i), k[0][1]);
      r[2] = u8x32_xor3 (r[2], aes_block_load_x2 (src + 4, i), k[0][2]);
      r[3] = u8x32_xor3 (r[3], aes_block_load_x2 (src + 6, i), k[0][3]);

      for (j = 1; j < rounds; j++)
	{
	  r[0] = aes_enc_round_x2 (r[0], k[j][0]);
	  r[1] = aes_enc_round_x2 (r[1], k[j][1]);
	  r[2] = aes_enc_round_x2 (r[2], k[j][2]);
	  r[3] = aes_enc_round_x2 (r[3], k[j][3]);
	}
      r[0] = aes_enc_last_round_x2 (r[0], k[j][0]);
      r[1] = aes_enc_last_round_x2 (r[1], k[j][1]);
      r[2] = aes_enc_last_round_x2 (r[2], k[j][2]);
      r[3] = aes_enc_last_round_x2 (r[3], k[j][3]);

      aes_block_store_x2 (dst, i, r[0]);
      aes_block_store_x2 (dst + 2, i, r[1]);
      aes_block_store_x2 (dst + 4, i, r[2]);
      aes_block_store_x2 (dst + 6, i, r[3]);
#elif defined (__x86_64__)
      r[0] = u8x16_xor3 (r[0], aes_block_load (src[0] + i), k[0][0]);
      r[1] = u8x16_xor3 (r[1], aes_block_load (src[1] + i), k[0][1]);
      r[2] = u8x16_xor3 (r[2], aes_block_load (src[2] + i), k[0][2]);
      r[3] = u8x16_xor3 (r[3], aes_block_load (src[3] + i), k[0][3]);

      for (j = 1; j < rounds; j++)
	{
	  r[0] = aes_enc_round_x1 (r[0], k[j][0]);
	  r[1] = aes_enc_round_x1 (r[1], k[j][1]);
	  r[2] = aes_enc_round_x1 (r[2], k[j][2]);
	  r[3] = aes_enc_round_x1 (r[3], k[j][3]);
	}

      r[0] = aes_enc_last_round_x1 (r[0], k[j][0]);
      r[1] = aes_enc_last_round_x1 (r[1], k[j][1]);
      r[2] = aes_enc_last_round_x1 (r[2], k[j][2]);
      r[3] = aes_enc_last_round_x1 (r[3], k[j][3]);

      aes_block_store (dst[0] + i, r[0]);
      aes_block_store (dst[1] + i, r[1]);
      aes_block_store (dst[2] + i, r[2]);
      aes_block_store (dst[3] + i, r[3]);
#else
      /* ARMv8 Crypto Extensions path */
      r[0] ^= aes_block_load (src[0] + i);
      r[1] ^= aes_block_load (src[1] + i);
      r[2] ^= aes_block_load (src[2] + i);
      r[3] ^= aes_block_load (src[3] + i);
      for (j = 0; j < rounds - 1; j++)
	{
	  r[0] = vaesmcq_u8 (vaeseq_u8 (r[0], k[j][0]));
	  r[1] = vaesmcq_u8 (vaeseq_u8 (r[1], k[j][1]));
	  r[2] = vaesmcq_u8 (vaeseq_u8 (r[2], k[j][2]));
	  r[3] = vaesmcq_u8 (vaeseq_u8 (r[3], k[j][3]));
	}
      r[0] = vaeseq_u8 (r[0], k[j][0]) ^ k[rounds][0];
      r[1] = vaeseq_u8 (r[1], k[j][1]) ^ k[rounds][1];
      r[2] = vaeseq_u8 (r[2], k[j][2]) ^ k[rounds][2];
      r[3] = vaeseq_u8 (r[3], k[j][3]) ^ k[rounds][3];
      aes_block_store (dst[0] + i, r[0]);
      aes_block_store (dst[1] + i, r[1]);
      aes_block_store (dst[2] + i, r[2]);
      aes_block_store (dst[3] + i, r[3]);
#endif
    }
  len -= u32xN_splat (count);

  for (i = 0; i < N_AES_BYTES; i++)
    {
      src[i] += count;
      dst[i] += count;
    }

  if (n_left > 0)
    goto more;

  if (!u32xN_is_all_zero (len & placeholder_mask))
    goto more;

  return n_ops;
}
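/* CBC decryption has no chaining dependency between output blocks (each
 * plaintext block depends only on two ciphertext blocks), so every op can
 * be decrypted on its own using the widest vector variant available. */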
static_always_inline u32
aes_ops_dec_aes_cbc (vlib_main_t * vm, vnet_crypto_op_t * ops[],
		     u32 n_ops, aes_key_size_t ks)
{
  crypto_native_main_t *cm = &crypto_native_main;
  int rounds = AES_KEY_ROUNDS (ks);
  vnet_crypto_op_t *op = ops[0];
  aes_cbc_key_data_t *kd = (aes_cbc_key_data_t *) cm->key_data[op->key_index];
  u32 n_left = n_ops;

  ASSERT (n_ops >= 1);

decrypt:
#if defined(__VAES__) && defined(__AVX512F__)
  aes4_cbc_dec (kd->decrypt_key, (u8x64u *) op->src, (u8x64u *) op->dst,
		(u8x16u *) op->iv, op->len, rounds);
#elif defined(__VAES__)
  aes2_cbc_dec (kd->decrypt_key, (u8x32u *) op->src, (u8x32u *) op->dst,
		(u8x16u *) op->iv, op->len, rounds);
#else
  aes_cbc_dec (kd->decrypt_key, (u8x16u *) op->src, (u8x16u *) op->dst,
	       (u8x16u *) op->iv, op->len, rounds);
#endif
  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;

  if (--n_left)
    {
      op += 1;
      kd = (aes_cbc_key_data_t *) cm->key_data[op->key_index];
      goto decrypt;
    }

  return n_ops;
}
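/* Instantiate non-inline wrappers of the handlers above for each supported
 * key size (AES-128/192/256); these are the functions registered with the
 * vnet crypto infrastructure below. */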
#define foreach_aes_cbc_handler_type _(128) _(192) _(256)

#define _(x) \
static u32 aes_ops_dec_aes_cbc_##x \
(vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
{ return aes_ops_dec_aes_cbc (vm, ops, n_ops, AES_KEY_##x); } \
static u32 aes_ops_enc_aes_cbc_##x \
(vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
{ return aes_ops_enc_aes_cbc (vm, ops, n_ops, AES_KEY_##x); } \

foreach_aes_cbc_handler_type;
#undef _
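/* Key handlers: expand the raw AES key into per-round encryption and
 * decryption schedules once, when the key is registered, so the data-path
 * handlers above only load ready-to-use round keys. */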
static void *
aes_cbc_key_exp_128 (vnet_crypto_key_t *key)
{
  aes_cbc_key_data_t *kd;
  kd = clib_mem_alloc_aligned (sizeof (*kd), CLIB_CACHE_LINE_BYTES);
  clib_aes128_cbc_key_expand (kd, key->data);
  return kd;
}

static void *
aes_cbc_key_exp_192 (vnet_crypto_key_t *key)
{
  aes_cbc_key_data_t *kd;
  kd = clib_mem_alloc_aligned (sizeof (*kd), CLIB_CACHE_LINE_BYTES);
  clib_aes192_cbc_key_expand (kd, key->data);
  return kd;
}

static void *
aes_cbc_key_exp_256 (vnet_crypto_key_t *key)
{
  aes_cbc_key_data_t *kd;
  kd = clib_mem_alloc_aligned (sizeof (*kd), CLIB_CACHE_LINE_BYTES);
  clib_aes256_cbc_key_expand (kd, key->data);
  return kd;
}
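/* The init function name depends on the instruction set this translation
 * unit is compiled for; the crypto_native plugin builds one variant per
 * supported microarchitecture and registers the best match at runtime. */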
clib_error_t *
#if defined(__VAES__) && defined(__AVX512F__)
crypto_native_aes_cbc_init_icl (vlib_main_t *vm)
#elif defined(__VAES__)
crypto_native_aes_cbc_init_adl (vlib_main_t *vm)
#elif __AVX512F__
crypto_native_aes_cbc_init_skx (vlib_main_t * vm)
#elif __aarch64__
crypto_native_aes_cbc_init_neon (vlib_main_t * vm)
#elif __AVX2__
crypto_native_aes_cbc_init_hsw (vlib_main_t * vm)
#else
crypto_native_aes_cbc_init_slm (vlib_main_t * vm)
#endif
{
  crypto_native_main_t *cm = &crypto_native_main;

#define _(x) \
  vnet_crypto_register_ops_handler (vm, cm->crypto_engine_index, \
				    VNET_CRYPTO_OP_AES_##x##_CBC_ENC, \
				    aes_ops_enc_aes_cbc_##x); \
  vnet_crypto_register_ops_handler (vm, cm->crypto_engine_index, \
				    VNET_CRYPTO_OP_AES_##x##_CBC_DEC, \
				    aes_ops_dec_aes_cbc_##x); \
  cm->key_fn[VNET_CRYPTO_ALG_AES_##x##_CBC] = aes_cbc_key_exp_##x;
  foreach_aes_cbc_handler_type;
#undef _

  return 0;
}
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */