_(CHACHA20_POLY1305, "chacha20-poly1305-aad12", 32, 16, 12)
/* CRYPTO_ID, INTEG_ID, PRETTY_NAME, KEY_LENGTH_IN_BYTES, DIGEST_LEN */
-#define foreach_crypto_link_async_alg \
- _ (AES_128_CBC, SHA1, "aes-128-cbc-hmac-sha-1", 16, 12) \
- _ (AES_192_CBC, SHA1, "aes-192-cbc-hmac-sha-1", 24, 12) \
- _ (AES_256_CBC, SHA1, "aes-256-cbc-hmac-sha-1", 32, 12) \
- _ (AES_128_CBC, SHA224, "aes-128-cbc-hmac-sha-224", 16, 14) \
- _ (AES_192_CBC, SHA224, "aes-192-cbc-hmac-sha-224", 24, 14) \
- _ (AES_256_CBC, SHA224, "aes-256-cbc-hmac-sha-224", 32, 14) \
- _ (AES_128_CBC, SHA256, "aes-128-cbc-hmac-sha-256", 16, 16) \
- _ (AES_192_CBC, SHA256, "aes-192-cbc-hmac-sha-256", 24, 16) \
- _ (AES_256_CBC, SHA256, "aes-256-cbc-hmac-sha-256", 32, 16) \
- _ (AES_128_CBC, SHA384, "aes-128-cbc-hmac-sha-384", 16, 24) \
- _ (AES_192_CBC, SHA384, "aes-192-cbc-hmac-sha-384", 24, 24) \
- _ (AES_256_CBC, SHA384, "aes-256-cbc-hmac-sha-384", 32, 24) \
- _ (AES_128_CBC, SHA512, "aes-128-cbc-hmac-sha-512", 16, 32) \
- _ (AES_192_CBC, SHA512, "aes-192-cbc-hmac-sha-512", 24, 32) \
- _ (AES_256_CBC, SHA512, "aes-256-cbc-hmac-sha-512", 32, 32)
+#define foreach_crypto_link_async_alg \
+ _ (AES_128_CBC, SHA1, "aes-128-cbc-hmac-sha-1", 16, 12) \
+ _ (AES_192_CBC, SHA1, "aes-192-cbc-hmac-sha-1", 24, 12) \
+ _ (AES_256_CBC, SHA1, "aes-256-cbc-hmac-sha-1", 32, 12) \
+ _ (AES_128_CBC, SHA224, "aes-128-cbc-hmac-sha-224", 16, 14) \
+ _ (AES_192_CBC, SHA224, "aes-192-cbc-hmac-sha-224", 24, 14) \
+ _ (AES_256_CBC, SHA224, "aes-256-cbc-hmac-sha-224", 32, 14) \
+ _ (AES_128_CBC, SHA256, "aes-128-cbc-hmac-sha-256", 16, 16) \
+ _ (AES_192_CBC, SHA256, "aes-192-cbc-hmac-sha-256", 24, 16) \
+ _ (AES_256_CBC, SHA256, "aes-256-cbc-hmac-sha-256", 32, 16) \
+ _ (AES_128_CBC, SHA384, "aes-128-cbc-hmac-sha-384", 16, 24) \
+ _ (AES_192_CBC, SHA384, "aes-192-cbc-hmac-sha-384", 24, 24) \
+ _ (AES_256_CBC, SHA384, "aes-256-cbc-hmac-sha-384", 32, 24) \
+ _ (AES_128_CBC, SHA512, "aes-128-cbc-hmac-sha-512", 16, 32) \
+ _ (AES_192_CBC, SHA512, "aes-192-cbc-hmac-sha-512", 24, 32) \
+ _ (AES_256_CBC, SHA512, "aes-256-cbc-hmac-sha-512", 32, 32) \
+ _ (AES_128_CTR, SHA1, "aes-128-ctr-hmac-sha-1", 16, 12) \
+ _ (AES_192_CTR, SHA1, "aes-192-ctr-hmac-sha-1", 24, 12) \
+ _ (AES_256_CTR, SHA1, "aes-256-ctr-hmac-sha-1", 32, 12)
#define foreach_crypto_async_op_type \
_(ENCRYPT, "async-encrypt") \
typedef struct
{
- vnet_crypto_op_status_t status:8;
- u32 key_index;
- i16 crypto_start_offset; /* first buffer offset */
- i16 integ_start_offset;
- u32 crypto_total_length;
- /* adj total_length for integ, e.g.4 bytes for IPSec ESN */
- u16 integ_length_adj;
u8 *iv;
union
{
u8 *tag;
};
u8 *aad;
+ u32 key_index;
+ u32 crypto_total_length;
+ i16 crypto_start_offset; /* first buffer offset */
+ i16 integ_start_offset;
+ /* adj total_length for integ, e.g. 4 bytes for IPSec ESN */
+ u16 integ_length_adj;
+ vnet_crypto_op_status_t status : 8;
u8 flags; /**< share same VNET_CRYPTO_OP_FLAG_* values */
} vnet_crypto_async_frame_elt_t;
+/* Assert the size so the compiler will warn us when it changes */
+STATIC_ASSERT_SIZEOF (vnet_crypto_async_frame_elt_t, 5 * sizeof (u64));
+
+typedef enum vnet_crypto_async_frame_state_t_
+{
+ VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED,
+ /* frame waiting to be processed */
+ VNET_CRYPTO_FRAME_STATE_PENDING,
+ VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS,
+ VNET_CRYPTO_FRAME_STATE_SUCCESS,
+ VNET_CRYPTO_FRAME_STATE_ELT_ERROR
+} __clib_packed vnet_crypto_async_frame_state_t;
+
typedef struct
{
CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
-#define VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED 0
-#define VNET_CRYPTO_FRAME_STATE_PENDING 1 /* frame waiting to be processed */
-#define VNET_CRYPTO_FRAME_STATE_WORK_IN_PROGRESS 2
-#define VNET_CRYPTO_FRAME_STATE_SUCCESS 3
-#define VNET_CRYPTO_FRAME_STATE_ELT_ERROR 4
- u8 state;
+ vnet_crypto_async_frame_state_t state;
vnet_crypto_async_op_id_t op:8;
u16 n_elts;
vnet_crypto_async_frame_elt_t elts[VNET_CRYPTO_FRAME_SIZE];
CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
vnet_crypto_async_frame_t *frames[VNET_CRYPTO_ASYNC_OP_N_IDS];
vnet_crypto_async_frame_t *frame_pool;
- u32 *buffer_indice;
+ u32 *buffer_indices;
u16 *nexts;
} vnet_crypto_thread_t;
vnet_crypto_async_op_id_t opt = frame->op;
u32 i = vlib_num_workers () > 0;
- int ret = (cm->enqueue_handlers[frame->op]) (vm, frame);
+ frame->state = VNET_CRYPTO_FRAME_STATE_PENDING;
frame->enqueue_thread_index = vm->thread_index;
+
+ int ret = (cm->enqueue_handlers[frame->op]) (vm, frame);
+
clib_bitmap_set_no_check (cm->async_active_ids, opt, 1);
if (PREDICT_TRUE (ret == 0))
{
vnet_crypto_async_frame_t *nf = 0;
- frame->state = VNET_CRYPTO_FRAME_STATE_PENDING;
pool_get_aligned (ct->frame_pool, nf, CLIB_CACHE_LINE_BYTES);
if (CLIB_DEBUG > 0)
clib_memset (nf, 0xfe, sizeof (*nf));
nf->n_elts = 0;
ct->frames[opt] = nf;
}
+ else
+ {
+ frame->state = VNET_CRYPTO_FRAME_STATE_ELT_ERROR;
+ }
if (cm->dispatch_mode == VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT)
{
{
vnet_crypto_async_op_id_t opt;
ASSERT (f != 0);
- ASSERT (f->state == VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED);
+ ASSERT ((f->state == VNET_CRYPTO_FRAME_STATE_NOT_PROCESSED
+ || f->state == VNET_CRYPTO_FRAME_STATE_ELT_ERROR));
opt = f->op;
if (CLIB_DEBUG > 0)
clib_memset (f, 0xfe, sizeof (*f));