{
ptls_cipher_context_t super;
vnet_crypto_op_t op;
+ vnet_crypto_op_id_t id;
u32 key_index;
};
struct vpp_aead_context_t
{
ptls_aead_context_t super;
+ EVP_CIPHER_CTX *evp_ctx;
+ uint8_t static_iv[PTLS_MAX_IV_SIZE];
vnet_crypto_op_t op;
+ u32 key_index;
+ vnet_crypto_op_id_t id;
vnet_crypto_op_chunk_t chunks[2];
vnet_crypto_alg_t alg;
- u32 key_index;
u32 chunk_index;
uint8_t iv[PTLS_MAX_IV_SIZE];
- uint8_t static_iv[PTLS_MAX_IV_SIZE];
};
static void
{
struct cipher_context_t *ctx = (struct cipher_context_t *) _ctx;
- vnet_crypto_op_id_t id;
- if (!strcmp (ctx->super.algo->name, "AES128-CTR"))
- {
- id = VNET_CRYPTO_OP_AES_128_CTR_ENC;
- }
- else if (!strcmp (ctx->super.algo->name, "AES256-CTR"))
- {
- id = VNET_CRYPTO_OP_AES_256_CTR_ENC;
- }
- else
- {
- TLS_DBG (1, "%s, Invalid crypto cipher : ", __FUNCTION__,
- _ctx->algo->name);
- assert (0);
- }
-
- vnet_crypto_op_init (&ctx->op, id);
+ vnet_crypto_op_init (&ctx->op, ctx->id);
ctx->op.iv = (u8 *) iv;
ctx->op.key_index = ctx->key_index;
}
if (!strcmp (ctx->super.algo->name, "AES128-CTR"))
{
algo = VNET_CRYPTO_ALG_AES_128_CTR;
+ ctx->id = is_enc ? VNET_CRYPTO_OP_AES_128_CTR_ENC :
+ VNET_CRYPTO_OP_AES_128_CTR_DEC;
}
else if (!strcmp (ctx->super.algo->name, "AES256-CTR"))
{
algo = VNET_CRYPTO_ALG_AES_256_CTR;
+ ctx->id = is_enc ? VNET_CRYPTO_OP_AES_256_CTR_ENC :
+ VNET_CRYPTO_OP_AES_256_CTR_DEC;
}
else
{
struct vpp_aead_context_t *ctx = (struct vpp_aead_context_t *) _ctx;
int tag_size = ctx->super.algo->tag_size;
- ctx->op.dst = _output;
- ctx->op.src = (void *) input;
- ctx->op.len = inlen - tag_size;;
+ vnet_crypto_op_init (&ctx->op, ctx->id);
+ ctx->op.aad = (u8 *) aad;
+ ctx->op.aad_len = aadlen;
ctx->op.iv = ctx->iv;
ptls_aead__build_iv (ctx->super.algo, ctx->op.iv, ctx->static_iv, seq);
- ctx->op.aad = (void *) aad;
- ctx->op.aad_len = aadlen;
- ctx->op.tag = (void *) input + inlen - tag_size;
+ ctx->op.src = (u8 *) input;
+ ctx->op.dst = _output;
+ ctx->op.key_index = ctx->key_index;
+ ctx->op.len = inlen - tag_size;
ctx->op.tag_len = tag_size;
+ ctx->op.tag = ctx->op.src + ctx->op.len;
vnet_crypto_process_ops (vm, &(ctx->op), 1);
assert (ctx->op.status == VNET_CRYPTO_OP_STATUS_COMPLETED);
- return inlen - tag_size;
+ return ctx->op.len;
}
static void
const void *aad, size_t aadlen)
{
struct vpp_aead_context_t *ctx = (struct vpp_aead_context_t *) _ctx;
- ctx->op.iv = ctx->iv;
- ptls_aead__build_iv (ctx->super.algo, ctx->op.iv, ctx->static_iv, seq);
+
+ vnet_crypto_op_init (&ctx->op, ctx->id);
ctx->op.aad = (void *) aad;
ctx->op.aad_len = aadlen;
+ ctx->op.iv = ctx->iv;
+ ptls_aead__build_iv (ctx->super.algo, ctx->op.iv, ctx->static_iv, seq);
+ ctx->op.key_index = ctx->key_index;
ctx->op.n_chunks = 2;
ctx->op.chunk_index = 0;
/*
 * picotls dispose_crypto hook: release the VPP crypto key held by this
 * AEAD context. The key table is shared, so deletion is serialized with
 * the same rwlock that guards vnet_crypto_key_add () in setup.
 */
static void
ptls_vpp_crypto_aead_dispose_crypto (ptls_aead_context_t * _ctx)
{
  vlib_main_t *vm = vlib_get_main ();
  struct vpp_aead_context_t *ctx = (struct vpp_aead_context_t *) _ctx;

  clib_rwlock_writer_lock (&picotls_main.crypto_keys_rw_lock);
  vnet_crypto_key_del (vm, ctx->key_index);
  clib_rwlock_writer_unlock (&picotls_main.crypto_keys_rw_lock);
}
static int
struct vpp_aead_context_t *ctx = (struct vpp_aead_context_t *) _ctx;
u16 key_len = ctx->super.algo->key_size;
- memset (&(ctx->op), 0, sizeof (vnet_crypto_op_t));
-
if (alg == VNET_CRYPTO_ALG_AES_128_GCM)
{
- if (is_enc)
- vnet_crypto_op_init (&(ctx->op), VNET_CRYPTO_OP_AES_128_GCM_ENC);
- else
- vnet_crypto_op_init (&(ctx->op), VNET_CRYPTO_OP_AES_128_GCM_DEC);
+ ctx->id = is_enc ? VNET_CRYPTO_OP_AES_128_GCM_ENC :
+ VNET_CRYPTO_OP_AES_128_GCM_DEC;
}
else if (alg == VNET_CRYPTO_ALG_AES_256_GCM)
{
- if (is_enc)
- {
- vnet_crypto_op_init (&(ctx->op), VNET_CRYPTO_OP_AES_256_GCM_ENC);
- }
- else
- vnet_crypto_op_init (&(ctx->op), VNET_CRYPTO_OP_AES_256_GCM_DEC);
+ ctx->id = is_enc ? VNET_CRYPTO_OP_AES_256_GCM_ENC :
+ VNET_CRYPTO_OP_AES_256_GCM_DEC;
}
else
{
}
ctx->alg = alg;
+ ctx->chunk_index = 0;
+ clib_memcpy (ctx->static_iv, iv, ctx->super.algo->iv_size);
clib_rwlock_writer_lock (&picotls_main.crypto_keys_rw_lock);
- ctx->op.key_index =
- vnet_crypto_key_add (vm, ctx->alg, (void *) key, key_len);
+ ctx->key_index = vnet_crypto_key_add (vm, alg, (void *) key, key_len);
clib_rwlock_writer_unlock (&picotls_main.crypto_keys_rw_lock);
- ctx->chunk_index = 0;
- clib_memcpy (ctx->static_iv, iv, ctx->super.algo->iv_size);
- ctx->super.do_decrypt = ptls_vpp_crypto_aead_decrypt;
- ctx->super.do_encrypt_init = ptls_vpp_crypto_aead_encrypt_init;
- ctx->super.do_encrypt_update = ptls_vpp_crypto_aead_encrypt_update;
- ctx->super.do_encrypt_final = ptls_vpp_crypto_aead_encrypt_final;
+ if (is_enc)
+ {
+ ctx->super.do_encrypt_init = ptls_vpp_crypto_aead_encrypt_init;
+ ctx->super.do_encrypt_update = ptls_vpp_crypto_aead_encrypt_update;
+ ctx->super.do_encrypt_final = ptls_vpp_crypto_aead_encrypt_final;
+ }
+ else
+ {
+ ctx->super.do_decrypt = ptls_vpp_crypto_aead_decrypt;
+ }
ctx->super.dispose_crypto = ptls_vpp_crypto_aead_dispose_crypto;
return 0;
ptls_vpp_crypto_aes256ctr_setup_crypto
};
+#define PTLS_X86_CACHE_LINE_ALIGN_BITS 6
ptls_aead_algorithm_t ptls_vpp_crypto_aes128gcm = {
"AES128-GCM",
PTLS_AESGCM_CONFIDENTIALITY_LIMIT,
PTLS_AES128_KEY_SIZE,
PTLS_AESGCM_IV_SIZE,
PTLS_AESGCM_TAG_SIZE,
+ { PTLS_TLS12_AESGCM_FIXED_IV_SIZE, PTLS_TLS12_AESGCM_RECORD_IV_SIZE },
+ 1,
+ PTLS_X86_CACHE_LINE_ALIGN_BITS,
sizeof (struct vpp_aead_context_t),
ptls_vpp_crypto_aead_aes128gcm_setup_crypto
};
PTLS_AES256_KEY_SIZE,
PTLS_AESGCM_IV_SIZE,
PTLS_AESGCM_TAG_SIZE,
+ { PTLS_TLS12_AESGCM_FIXED_IV_SIZE, PTLS_TLS12_AESGCM_RECORD_IV_SIZE },
+ 1,
+ PTLS_X86_CACHE_LINE_ALIGN_BITS,
sizeof (struct vpp_aead_context_t),
ptls_vpp_crypto_aead_aes256gcm_setup_crypto
};