+/*
+ * vnet crypto key add/del/modify callback for the Intel IPsec Multi-Buffer
+ * (ipsecmb) engine.
+ *
+ * Maintains imbm->key_data, a vector of per-key derived key material indexed
+ * by the vnet crypto key index:
+ *  - DEL:          zeroize and free the stored material, clear the slot;
+ *  - ADD / MODIFY: allocate a fresh buffer of ad->data_size bytes and derive
+ *    engine-specific material via the algorithm's hooks (AES-CBC key
+ *    expansion, AES-GCM precomputation, or HMAC ipad/opad pre-hashing).
+ *
+ * @param vm   vlib main (unused here; part of the callback signature)
+ * @param kop  key operation (add / modify / delete)
+ * @param idx  vnet crypto key index identifying the key
+ */
+static void
+crypto_ipsecmb_key_handler (vlib_main_t * vm, vnet_crypto_key_op_t kop,
+ vnet_crypto_key_index_t idx)
+{
+ ipsecmb_main_t *imbm = &ipsecmb_main;
+ vnet_crypto_key_t *key = vnet_crypto_get_key (idx);
+ ipsecmb_alg_data_t *ad = imbm->alg_data + key->alg;
+ u32 i;
+ void *kd;
+
+ if (kop == VNET_CRYPTO_KEY_OP_DEL)
+ {
+ /* Nothing stored for this index: vector never grew that far, or the
+  * slot was never populated / already freed. */
+ if (idx >= vec_len (imbm->key_data))
+ return;
+
+ if (imbm->key_data[idx] == 0)
+ return;
+
+ /* Wipe the derived key material before releasing the buffer so secrets
+  * do not linger in freed memory; then clear the slot to mark it empty. */
+ clib_memset_u8 (imbm->key_data[idx], 0,
+ clib_mem_size (imbm->key_data[idx]));
+ clib_mem_free (imbm->key_data[idx]);
+ imbm->key_data[idx] = 0;
+ return;
+ }
+
+ /* data_size == 0: this engine keeps no per-key material for the
+  * algorithm — nothing to derive or store. */
+ if (ad->data_size == 0)
+ return;
+
+ /* Grow the key_data vector (cache-line aligned) so slot idx exists. */
+ vec_validate_aligned (imbm->key_data, idx, CLIB_CACHE_LINE_BYTES);
+
+ /* MODIFY with existing material: wipe and free the old buffer before
+  * deriving from the new key below. */
+ if (kop == VNET_CRYPTO_KEY_OP_MODIFY && imbm->key_data[idx])
+ {
+ clib_memset_u8 (imbm->key_data[idx], 0,
+ clib_mem_size (imbm->key_data[idx]));
+ clib_mem_free (imbm->key_data[idx]);
+ }
+
+ /* Fresh cache-line-aligned buffer for the derived key data; kd aliases
+  * the stored slot for the derivation calls below. */
+ kd = imbm->key_data[idx] = clib_mem_alloc_aligned (ad->data_size,
+ CLIB_CACHE_LINE_BYTES);
+
+ /* AES CBC key expansion */
+ if (ad->keyexp)
+ {
+ /* Expand the raw key into separate encrypt / decrypt round-key
+  * schedules inside the stored buffer. */
+ ad->keyexp (key->data, ((ipsecmb_aes_cbc_key_data_t *) kd)->enc_key_exp,
+ ((ipsecmb_aes_cbc_key_data_t *) kd)->dec_key_exp);
+ return;
+ }
+
+ /* AES GCM */
+ if (ad->aes_gcm_pre)
+ {
+ /* Precompute the GCM key-dependent data (gcm_key_data) from the raw
+  * key. */
+ ad->aes_gcm_pre (key->data, (struct gcm_key_data *) kd);
+ return;
+ }
+
+ /* HMAC */
+ if (ad->hash_one_block)
+ {
+ /* Per RFC 2104: a key longer than one hash block is first reduced with
+  * the hash function; shorter keys are zero-padded to the block size. */
+ const int block_qw = HMAC_MAX_BLOCK_SIZE / sizeof (u64);
+ u64 pad[block_qw], key_hash[block_qw];
+
+ clib_memset_u8 (key_hash, 0, HMAC_MAX_BLOCK_SIZE);
+ if (vec_len (key->data) <= ad->block_size)
+ clib_memcpy_fast (key_hash, key->data, vec_len (key->data));
+ else
+ ad->hash_fn (key->data, vec_len (key->data), key_hash);
+
+ /* Precompute the inner state: one hash block of key ^ ipad (0x36),
+  * stored in the first half of the buffer. */
+ for (i = 0; i < block_qw; i++)
+ pad[i] = key_hash[i] ^ 0x3636363636363636;
+ ad->hash_one_block (pad, kd);
+
+ /* Precompute the outer state: key ^ opad (0x5c), stored in the second
+  * half of the buffer (offset data_size / 2). */
+ for (i = 0; i < block_qw; i++)
+ pad[i] = key_hash[i] ^ 0x5c5c5c5c5c5c5c5c;
+ ad->hash_one_block (pad, ((u8 *) kd) + (ad->data_size / 2));
+
+ return;
+ }
+}
+