payload = b[0]->data + pd->current_data;
/* we need 4 extra bytes for the HMAC calculation when ESN is used */
- if ((sa0->flags & IPSEC_SA_FLAG_USE_ESN) && pd->icv_sz &&
+ if (ipsec_sa_is_set_USE_ESN (sa0) && pd->icv_sz &&
(pd->current_data + pd->current_length + 4 > buffer_data_size))
{
b[0]->error = node->errors[ESP_DECRYPT_ERROR_NO_TAIL_SPACE];
current_sa_pkts += 1;
current_sa_bytes += pd->current_length;
- if (PREDICT_TRUE (cpd.icv_sz > 0))
+ if (PREDICT_TRUE (sa0->integ_op_id != VNET_CRYPTO_OP_NONE))
{
vnet_crypto_op_t *op;
vec_add2_aligned (ptd->integ_ops, op, 1, CLIB_CACHE_LINE_BYTES);
vnet_crypto_op_init (op, sa0->integ_op_id);
- op->key = sa0->integ_key.data;
- op->key_len = sa0->integ_key.len;
+ op->key_index = sa0->integ_key_index;
op->src = payload;
op->flags = VNET_CRYPTO_OP_FLAG_HMAC_CHECK;
op->user_data = b - bufs;
op->digest = payload + len;
op->digest_len = cpd.icv_sz;
op->len = len;
- if (PREDICT_TRUE (sa0->flags & IPSEC_SA_FLAG_USE_ESN))
+ if (ipsec_sa_is_set_USE_ESN (sa0))
{
/* shift the ICV by 4 bytes to insert the ESN high bits.
   NOTE(review): the original ICV should be saved into tmp first
   (clib_memcpy_fast (tmp, payload + len, ESP_MAX_ICV_SIZE)) before
   payload + len is overwritten with seq_hi; that copy is not visible
   in this hunk, so tmp appears to be read uninitialized below —
   verify against upstream */
u8 tmp[ESP_MAX_ICV_SIZE], sz = sizeof (sa0->seq_hi);
clib_memcpy_fast (payload + len, &sa0->seq_hi, sz);
clib_memcpy_fast (payload + len + sz, tmp, ESP_MAX_ICV_SIZE);
op->len += sz;
- op->dst += sz;
+ op->digest += sz;
}
}
vnet_crypto_op_t *op;
vec_add2_aligned (ptd->crypto_ops, op, 1, CLIB_CACHE_LINE_BYTES);
vnet_crypto_op_init (op, sa0->crypto_dec_op_id);
- op->key = sa0->crypto_key.data;
+ op->key_index = sa0->crypto_key_index;
op->iv = payload;
+
+ if (ipsec_sa_is_set_IS_AEAD (sa0))
+ {
+ esp_header_t *esp0;
+ esp_aead_t *aad;
+ u8 *scratch;
+
+ /*
+ * construct the AAD and the nonce (Salt || IV) in a scratch
+ * space in front of the IP header.
+ */
+ scratch = payload - esp_sz;
+ esp0 = (esp_header_t *) (scratch);
+
+ scratch -= (sizeof (*aad) + pd->hdr_sz);
+ op->aad = scratch;
+
+ esp_aad_fill (op, esp0, sa0);
+
+ /*
+ * the ESP header is no longer needed at this point, so we
+ * can overwrite it with the salt and use the IV in place
+ * to form the nonce = (Salt || IV)
+ */
+ op->iv -= sizeof (sa0->salt);
+ clib_memcpy_fast (op->iv, &sa0->salt, sizeof (sa0->salt));
+ op->iv_len = cpd.iv_sz + sizeof (sa0->salt);
+
+ op->tag = payload + len;
+ op->tag_len = 16;
+ }
op->src = op->dst = payload += cpd.iv_sz;
- op->len = len;
+ op->len = len - cpd.iv_sz;
op->user_data = b - bufs;
}
ASSERT (op - ptd->crypto_ops < vec_len (ptd->crypto_ops));
if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
{
- u32 bi = op->user_data;
- u32 err = ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR;
+ u32 err, bi;
+
+ bi = op->user_data;
+
+ if (op->status == VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC)
+ err = ESP_DECRYPT_ERROR_DECRYPTION_FAILED;
+ else
+ err = ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR;
+
bufs[bi]->error = node->errors[err];
nexts[bi] = ESP_DECRYPT_NEXT_DROP;
n--;