/*
 *------------------------------------------------------------------
 * Copyright (c) 2019 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *------------------------------------------------------------------
 */
#include <fcntl.h>		/* open(), O_RDONLY */
#include <unistd.h>		/* read(), close() */

#include <vlib/vlib.h>
#include <vnet/plugin/plugin.h>
#include <vnet/crypto/crypto.h>
#include <x86intrin.h>
#include <crypto_ia32/crypto_ia32.h>
#include <crypto_ia32/aesni.h>
typedef struct
{
  __m128i encrypt_key[15];
  __m128i decrypt_key[15];
} aes_cbc_key_data_t;
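/* Note: 15 round keys is the worst case -- AES-256 runs 14 rounds plus the
   initial AddRoundKey, i.e. 15 schedule entries. AES-128 and AES-192
   simply leave the tail of each array unused. */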
static_always_inline void
aes_cbc_dec (__m128i * k, u8 * src, u8 * dst, u8 * iv, int count,
	     aesni_key_size_t rounds)
{
  __m128i r0, r1, r2, r3, c0, c1, c2, c3, f;
  int i;

  f = _mm_loadu_si128 ((__m128i *) iv);

  /* main loop: decrypt 4 blocks (64 bytes) per iteration */
  while (count >= 64)
    {
      _mm_prefetch (src + 128, _MM_HINT_T0);
      _mm_prefetch (dst + 128, _MM_HINT_T0);

      c0 = _mm_loadu_si128 (((__m128i *) src + 0));
      c1 = _mm_loadu_si128 (((__m128i *) src + 1));
      c2 = _mm_loadu_si128 (((__m128i *) src + 2));
      c3 = _mm_loadu_si128 (((__m128i *) src + 3));

      /* initial AddRoundKey */
      r0 = c0 ^ k[0];
      r1 = c1 ^ k[0];
      r2 = c2 ^ k[0];
      r3 = c3 ^ k[0];

      for (i = 1; i < rounds; i++)
	{
	  r0 = _mm_aesdec_si128 (r0, k[i]);
	  r1 = _mm_aesdec_si128 (r1, k[i]);
	  r2 = _mm_aesdec_si128 (r2, k[i]);
	  r3 = _mm_aesdec_si128 (r3, k[i]);
	}

      r0 = _mm_aesdeclast_si128 (r0, k[i]);
      r1 = _mm_aesdeclast_si128 (r1, k[i]);
      r2 = _mm_aesdeclast_si128 (r2, k[i]);
      r3 = _mm_aesdeclast_si128 (r3, k[i]);

      /* CBC: xor each decrypted block with the previous ciphertext block */
      _mm_storeu_si128 ((__m128i *) dst + 0, r0 ^ f);
      _mm_storeu_si128 ((__m128i *) dst + 1, r1 ^ c0);
      _mm_storeu_si128 ((__m128i *) dst + 2, r2 ^ c1);
      _mm_storeu_si128 ((__m128i *) dst + 3, r3 ^ c2);

      f = c3;
      count -= 64;
      src += 64;
      dst += 64;
    }

  /* tail: decrypt remaining blocks one at a time */
  while (count > 0)
    {
      c0 = _mm_loadu_si128 ((__m128i *) src);
      r0 = c0 ^ k[0];
      for (i = 1; i < rounds; i++)
	r0 = _mm_aesdec_si128 (r0, k[i]);
      r0 = _mm_aesdeclast_si128 (r0, k[i]);
      _mm_storeu_si128 ((__m128i *) dst, r0 ^ f);
      f = c0;
      count -= 16;
      src += 16;
      dst += 16;
    }
}
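/* aes_cbc_dec keeps 4 blocks in flight to hide AESDEC latency: rounds of
   independent blocks overlap in the pipeline, so on cores where AESDEC is
   fully pipelined the throughput approaches one instruction per cycle
   rather than being bound by its multi-cycle latency. This works for CBC
   *decryption* because every cipher input is a ciphertext block that is
   available up front; encryption (below) must interleave independent
   buffers instead. The depth of 4 is a tuning choice, trading register
   pressure against overlap. */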
static_always_inline u32
aesni_ops_enc_aes_cbc (vlib_main_t * vm, vnet_crypto_op_t * ops[],
		       u32 n_ops, aesni_key_size_t ks)
{
  crypto_ia32_main_t *cm = &crypto_ia32_main;
  crypto_ia32_per_thread_data_t *ptd = vec_elt_at_index (cm->per_thread_data,
							 vm->thread_index);
  int rounds = AESNI_KEY_ROUNDS (ks);
  u8 dummy[8192];
  u8 *src[4] = { };
  u8 *dst[4] = { };
  vnet_crypto_key_index_t key_index[4] = { ~0, ~0, ~0, ~0 };
  u32x4 dummy_mask = { };
  u32x4 len = { };
  u32 i, j, count, n_left = n_ops;
  __m128i r[4] = { }, k[4][rounds + 1];

more:
  /* (re)fill any lane that has run out of data */
  for (i = 0; i < 4; i++)
    if (len[i] == 0)
      {
	if (n_left == 0)
	  {
	    /* no more work to enqueue, so we are enqueueing dummy buffer */
	    src[i] = dst[i] = dummy;
	    len[i] = sizeof (dummy);
	    dummy_mask[i] = 0;
	  }
	else
	  {
	    if (ops[0]->flags & VNET_CRYPTO_OP_FLAG_INIT_IV)
	      {
		/* hand out a fresh IV and advance the per-thread state */
		r[i] = ptd->cbc_iv[i];
		_mm_storeu_si128 ((__m128i *) ops[0]->iv, r[i]);
		ptd->cbc_iv[i] = _mm_aesenc_si128 (r[i], r[i]);
	      }
	    else
	      r[i] = _mm_loadu_si128 ((__m128i *) ops[0]->iv);
	    src[i] = ops[0]->src;
	    dst[i] = ops[0]->dst;
	    len[i] = ops[0]->len;
	    dummy_mask[i] = ~0;
	    if (key_index[i] != ops[0]->key_index)
	      {
		aes_cbc_key_data_t *kd;
		key_index[i] = ops[0]->key_index;
		kd = (aes_cbc_key_data_t *) cm->key_data[key_index[i]];
		clib_memcpy_fast (k[i], kd->encrypt_key,
				  (rounds + 1) * sizeof (__m128i));
	      }
	    ops[0]->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
	    n_left--;
	    ops++;
	  }
      }

  /* advance only as far as the shortest lane allows */
  count = u32x4_min_scalar (len);

  ASSERT (count % 16 == 0);

  for (i = 0; i < count; i += 16)
    {
      /* CBC chaining xor fused with the initial AddRoundKey, per lane */
      r[0] ^= _mm_loadu_si128 ((__m128i *) (src[0] + i)) ^ k[0][0];
      r[1] ^= _mm_loadu_si128 ((__m128i *) (src[1] + i)) ^ k[1][0];
      r[2] ^= _mm_loadu_si128 ((__m128i *) (src[2] + i)) ^ k[2][0];
      r[3] ^= _mm_loadu_si128 ((__m128i *) (src[3] + i)) ^ k[3][0];

      for (j = 1; j < rounds; j++)
	{
	  r[0] = _mm_aesenc_si128 (r[0], k[0][j]);
	  r[1] = _mm_aesenc_si128 (r[1], k[1][j]);
	  r[2] = _mm_aesenc_si128 (r[2], k[2][j]);
	  r[3] = _mm_aesenc_si128 (r[3], k[3][j]);
	}

      r[0] = _mm_aesenclast_si128 (r[0], k[0][j]);
      r[1] = _mm_aesenclast_si128 (r[1], k[1][j]);
      r[2] = _mm_aesenclast_si128 (r[2], k[2][j]);
      r[3] = _mm_aesenclast_si128 (r[3], k[3][j]);

      _mm_storeu_si128 ((__m128i *) (dst[0] + i), r[0]);
      _mm_storeu_si128 ((__m128i *) (dst[1] + i), r[1]);
      _mm_storeu_si128 ((__m128i *) (dst[2] + i), r[2]);
      _mm_storeu_si128 ((__m128i *) (dst[3] + i), r[3]);
    }

  for (i = 0; i < 4; i++)
    {
      src[i] += count;
      dst[i] += count;
      len[i] -= count;
    }

  if (n_left > 0)
    goto more;

  if (!u32x4_is_all_zero (len & dummy_mask))
    goto more;

  return n_ops;
}
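/* Because each CBC-encrypted block depends on the previous ciphertext
   block, a single buffer cannot be pipelined. The routine above therefore
   runs 4 independent ops side by side; when fewer than 4 real ops remain,
   idle lanes are pointed at the stack `dummy' buffer, and dummy_mask
   excludes those lanes from the termination test so that only real work
   gates completion. */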
static_always_inline u32
aesni_ops_dec_aes_cbc (vlib_main_t * vm, vnet_crypto_op_t * ops[],
		       u32 n_ops, aesni_key_size_t ks)
{
  crypto_ia32_main_t *cm = &crypto_ia32_main;
  int rounds = AESNI_KEY_ROUNDS (ks);
  vnet_crypto_op_t *op = ops[0];
  aes_cbc_key_data_t *kd = (aes_cbc_key_data_t *) cm->key_data[op->key_index];
  u32 n_left = n_ops;

  ASSERT (n_ops >= 1);

decrypt:
  aes_cbc_dec (kd->decrypt_key, op->src, op->dst, op->iv, op->len, rounds);
  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;

  if (--n_left)
    {
      op += 1;
      kd = (aes_cbc_key_data_t *) cm->key_data[op->key_index];
      goto decrypt;
    }

  return n_ops;
}
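/* Decryption needs no cross-op interleaving: aes_cbc_dec already keeps 4
   blocks of a single buffer in flight, so ops are simply processed one at
   a time. */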
static_always_inline void *
aesni_cbc_key_exp (vnet_crypto_key_t * key, aesni_key_size_t ks)
{
  aes_cbc_key_data_t *kd;
  kd = clib_mem_alloc_aligned (sizeof (*kd), CLIB_CACHE_LINE_BYTES);
  aes_key_expand (kd->encrypt_key, key->data, ks);
  aes_key_expand (kd->decrypt_key, key->data, ks);
  aes_key_enc_to_dec (kd->decrypt_key, ks);
  return kd;
}
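/* The decrypt schedule starts as a copy of the encrypt schedule;
   aes_key_enc_to_dec then converts it for use with AESDEC, which in the
   AES-NI "equivalent inverse cipher" model amounts to reversing the
   round-key order and applying InvMixColumns (AESIMC) to the inner round
   keys. */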
#define foreach_aesni_cbc_handler_type _(128) _(192) _(256)

#define _(x) \
static u32 aesni_ops_dec_aes_cbc_##x \
(vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
{ return aesni_ops_dec_aes_cbc (vm, ops, n_ops, AESNI_KEY_##x); } \
static u32 aesni_ops_enc_aes_cbc_##x \
(vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
{ return aesni_ops_enc_aes_cbc (vm, ops, n_ops, AESNI_KEY_##x); } \
static void * aesni_cbc_key_exp_##x (vnet_crypto_key_t *key) \
{ return aesni_cbc_key_exp (key, AESNI_KEY_##x); }

foreach_aesni_cbc_handler_type;
#undef _
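/* For illustration, the _(128) case above expands to three thin wrappers
   (shown manually expanded):

     static u32 aesni_ops_dec_aes_cbc_128
       (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops)
     { return aesni_ops_dec_aes_cbc (vm, ops, n_ops, AESNI_KEY_128); }

   plus the matching _enc_ handler and key-expansion function, giving each
   key size a distinct function pointer to register below. */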
clib_error_t *
#ifdef __AVX512F__
crypto_ia32_aesni_cbc_init_avx512 (vlib_main_t * vm)
#elif __AVX2__
crypto_ia32_aesni_cbc_init_avx2 (vlib_main_t * vm)
#else
crypto_ia32_aesni_cbc_init_sse42 (vlib_main_t * vm)
#endif
{
  crypto_ia32_main_t *cm = &crypto_ia32_main;
  crypto_ia32_per_thread_data_t *ptd;
  clib_error_t *err = 0;
  int fd;

  if ((fd = open ("/dev/urandom", O_RDONLY)) < 0)
    return clib_error_return_unix (0, "failed to open '/dev/urandom'");

  /* seed the per-thread IV generator state with random data */
  vec_foreach (ptd, cm->per_thread_data)
    {
      for (int i = 0; i < 4; i++)
	{
	  if (read (fd, ptd->cbc_iv, sizeof (ptd->cbc_iv)) !=
	      sizeof (ptd->cbc_iv))
	    {
	      err = clib_error_return_unix (0, "'/dev/urandom' read failure");
	      goto error;
	    }
	}
    }

#define _(x) \
  vnet_crypto_register_ops_handler (vm, cm->crypto_engine_index, \
				    VNET_CRYPTO_OP_AES_##x##_CBC_ENC, \
				    aesni_ops_enc_aes_cbc_##x); \
  vnet_crypto_register_ops_handler (vm, cm->crypto_engine_index, \
				    VNET_CRYPTO_OP_AES_##x##_CBC_DEC, \
				    aesni_ops_dec_aes_cbc_##x); \
  cm->key_fn[VNET_CRYPTO_ALG_AES_##x##_CBC] = aesni_cbc_key_exp_##x;
  foreach_aesni_cbc_handler_type;
#undef _

error:
  close (fd);
  return err;
}
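/* A note on the #ifdef above: this source is presumably built once per
   target instruction set (SSE4.2 / AVX2 / AVX-512), yielding one init
   symbol per variant, with the plugin's generic init code expected to pick
   the best variant the running CPU supports. */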
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */