/**
 * Make the inbound integrity check cover the Extended Sequence Number.
 *
 * For ESN-enabled SAs the authenticator is computed over the packet plus
 * the 32-bit seq_hi, which is never on the wire.  This helper splices
 * seq_hi (big-endian) into the data the crypto op will hash:
 *  - if the ICV was already stripped into a separate buffer
 *    (pd->icv_removed), seq_hi is appended at the tail of the last
 *    buffer in the chain (pd->lb), provided there is room;
 *  - otherwise the ICV is shifted 4 bytes towards the tail and seq_hi
 *    is written into the gap, with op->len/op->digest adjusted to match.
 *
 * @return 1 on success (or when the SA does not use ESN, a no-op);
 *         0 when there is no tail room to append seq_hi.
 *
 * NOTE(review): in the icv_removed path, len[0] is refreshed from
 * b->current_length only on the success branch — callers presumably
 * treat a 0 return as "retry via the ICV-move path"; confirm against
 * the decrypt node that invokes this.
 */
static_always_inline int
esp_insert_esn (vlib_main_t * vm, ipsec_sa_t * sa,
		esp_decrypt_packet_data_t * pd, vnet_crypto_op_t * op,
		u16 * len, vlib_buffer_t * b, u8 * payload)
{
  if (!ipsec_sa_is_set_USE_ESN (sa))
    return 1;

  /* shift ICV by 4 bytes to insert ESN */
  u32 seq_hi = clib_host_to_net_u32 (sa->seq_hi);
  u8 tmp[ESP_MAX_ICV_SIZE], sz = sizeof (sa->seq_hi);

  if (pd->icv_removed)
    {
      /* ICV already lives in its own buffer; just append seq_hi at the
       * tail of the last chain buffer so the hash covers it. */
      u16 space_left = vlib_buffer_space_left_at_end (vm, pd->lb);
      if (space_left >= sz)
	{
	  clib_memcpy_fast (vlib_buffer_get_tail (pd->lb), &seq_hi, sz);
	  op->len += sz;
	}
      else
	return 0;

      len[0] = b->current_length;
    }
  else
    {
      /* ICV is in-line at payload + len[0]: save it, overwrite its slot
       * with seq_hi, then restore it 4 bytes further along.  The copy
       * through tmp avoids overlapping memcpy regions.  The digest
       * pointer moves with the relocated ICV. */
      clib_memcpy_fast (tmp, payload + len[0], ESP_MAX_ICV_SIZE);
      clib_memcpy_fast (payload + len[0], &seq_hi, sz);
      clib_memcpy_fast (payload + len[0] + sz, tmp, ESP_MAX_ICV_SIZE);
      op->len += sz;
      op->digest += sz;
    }
  return 1;
}
-
/**
 * Relocate the ICV out of the buffer chain tail and, for ESN SAs,
 * splice seq_hi into the authenticated data.
 *
 * First delegates to esp_move_icv() to move the ICV (which may span
 * buffer boundaries) into a contiguous location; any bytes that move
 * out of the hashed region are subtracted from op->len.  Then, if the
 * SA uses ESN, the big-endian seq_hi is either:
 *  - appended at the tail of the last buffer (pd->lb) when there is
 *    room, extending op->len accordingly; or
 *  - written immediately BEFORE the relocated ICV data in the spare
 *    buffer (pd->free_buffer_index), with extra_esn[0] set so the
 *    caller knows to account for those out-of-line bytes.
 *
 * @return pointer to the (relocated) ICV/digest for the crypto op.
 *
 * NOTE(review): the fallback path asserts pd->icv_removed and assumes
 * vlib_buffer_get_current(tmp) - sz is valid headroom in the spare
 * buffer — presumably guaranteed by esp_move_icv(); confirm there.
 */
static_always_inline u8 *
esp_move_icv_esn (vlib_main_t * vm, vlib_buffer_t * first,
		  esp_decrypt_packet_data_t * pd, u16 icv_sz, ipsec_sa_t * sa,
		  u8 * extra_esn, vnet_crypto_op_t * op)
{
  u16 dif = 0;
  u8 *digest = esp_move_icv (vm, first, pd, icv_sz, &dif);
  if (dif)
    /* dif bytes of ICV were pulled out of the hashed region */
    op->len -= dif;

  if (ipsec_sa_is_set_USE_ESN (sa))
    {
      u8 sz = sizeof (sa->seq_hi);
      u32 seq_hi = clib_host_to_net_u32 (sa->seq_hi);
      u16 space_left = vlib_buffer_space_left_at_end (vm, pd->lb);

      if (space_left >= sz)
	{
	  /* room at the tail: seq_hi becomes the last hashed bytes */
	  clib_memcpy_fast (vlib_buffer_get_tail (pd->lb), &seq_hi, sz);
	  op->len += sz;
	}
      else
	{
	  /* no space for ESN at the tail, use the next buffer
	   * (with ICV data) */
	  ASSERT (pd->icv_removed);
	  vlib_buffer_t *tmp = vlib_get_buffer (vm, pd->free_buffer_index);
	  clib_memcpy_fast (vlib_buffer_get_current (tmp) - sz, &seq_hi, sz);
	  extra_esn[0] = 1;
	}
    }
  return digest;
}
-