*out_next_index = vlib_node_add_next (vm, prev_node->index, node->index);
}
+/*
+ * Resolve the feature index of @node_name on the feature arc named
+ * @arc_name and return it through @out_feature_index.
+ * The arc must exist: vnet_get_feature_arc_index () returning (u8)~0
+ * (arc not found) trips the ASSERT in debug images.
+ */
+void
+ipsec_add_feature (const char *arc_name,
+ const char *node_name, u32 * out_feature_index)
+{
+  u8 arc;
+
+ /* Look the arc up by name first; the node index is scoped to the arc. */
+ arc = vnet_get_feature_arc_index (arc_name);
+ ASSERT (arc != (u8) ~ 0);
+ *out_feature_index = vnet_get_feature_index (arc, node_name);
+}
+
+/*
+ * Drop one reference on UDP @port held in im->udp_port_registrations.
+ * When the refcount reaches zero the port is unregistered from the UDP
+ * dispatch table and its hash entry removed; otherwise only the stored
+ * count is updated.  The port must currently be registered (ASSERT).
+ */
+void
+ipsec_unregister_udp_port (u16 port)
+{
+ ipsec_main_t *im = &ipsec_main;
+ u32 n_regs;
+ uword *p;
+
+ p = hash_get (im->udp_port_registrations, port);
+
+ /* Unbalanced unregister (port never registered) is a caller bug. */
+ ASSERT (p);
+
+ n_regs = p[0];
+
+ if (0 == --n_regs)
+ {
+ /* Last reference: stop punting this port to the tunnel input node.
+  * Third argument presumably selects the IPv4 port space — confirm
+  * against the udp_unregister_dst_port () prototype. */
+ udp_unregister_dst_port (vlib_get_main (), port, 1);
+ hash_unset (im->udp_port_registrations, port);
+ }
+ else
+ {
+ /* Store the decremented refcount back into the hash. */
+ hash_unset (im->udp_port_registrations, port);
+ hash_set (im->udp_port_registrations, port, n_regs);
+ }
+}
+
+/*
+ * Take a reference on UDP @port for IPsec tunnel input.  The first
+ * registration binds the port to ipsec4_tun_input_node in the UDP
+ * dispatch table; subsequent calls for the same port only bump the
+ * refcount kept in im->udp_port_registrations.  Paired with
+ * ipsec_unregister_udp_port ().
+ */
+void
+ipsec_register_udp_port (u16 port)
+{
+ ipsec_main_t *im = &ipsec_main;
+ u32 n_regs;
+ uword *p;
+
+ p = hash_get (im->udp_port_registrations, port);
+
+ /* No hash entry means this is the first registration. */
+ n_regs = (p ? p[0] : 0);
+
+ if (0 == n_regs++)
+ /* Last argument presumably selects the IPv4 port space — confirm
+  * against the udp_register_dst_port () prototype. */
+ udp_register_dst_port (vlib_get_main (), port,
+ ipsec4_tun_input_node.index, 1);
+
+ /* Store the incremented refcount back into the hash. */
+ hash_unset (im->udp_port_registrations, port);
+ hash_set (im->udp_port_registrations, port, n_regs);
+}
+
u32
ipsec_register_ah_backend (vlib_main_t * vm, ipsec_main_t * im,
const char *name,
a->dec_op_id = VNET_CRYPTO_OP_NONE;
a->alg = VNET_CRYPTO_ALG_NONE;
a->iv_size = 0;
- a->block_size = 1;
+ a->block_align = 1;
a = im->crypto_algs + IPSEC_CRYPTO_ALG_DES_CBC;
a->enc_op_id = VNET_CRYPTO_OP_DES_CBC_ENC;
a->dec_op_id = VNET_CRYPTO_OP_DES_CBC_DEC;
a->alg = VNET_CRYPTO_ALG_DES_CBC;
- a->iv_size = a->block_size = 8;
+ a->iv_size = a->block_align = 8;
a = im->crypto_algs + IPSEC_CRYPTO_ALG_3DES_CBC;
a->enc_op_id = VNET_CRYPTO_OP_3DES_CBC_ENC;
a->dec_op_id = VNET_CRYPTO_OP_3DES_CBC_DEC;
a->alg = VNET_CRYPTO_ALG_3DES_CBC;
- a->iv_size = a->block_size = 8;
+ a->iv_size = a->block_align = 8;
a = im->crypto_algs + IPSEC_CRYPTO_ALG_AES_CBC_128;
a->enc_op_id = VNET_CRYPTO_OP_AES_128_CBC_ENC;
a->dec_op_id = VNET_CRYPTO_OP_AES_128_CBC_DEC;
a->alg = VNET_CRYPTO_ALG_AES_128_CBC;
- a->iv_size = a->block_size = 16;
+ a->iv_size = a->block_align = 16;
a = im->crypto_algs + IPSEC_CRYPTO_ALG_AES_CBC_192;
a->enc_op_id = VNET_CRYPTO_OP_AES_192_CBC_ENC;
a->dec_op_id = VNET_CRYPTO_OP_AES_192_CBC_DEC;
a->alg = VNET_CRYPTO_ALG_AES_192_CBC;
- a->iv_size = a->block_size = 16;
+ a->iv_size = a->block_align = 16;
a = im->crypto_algs + IPSEC_CRYPTO_ALG_AES_CBC_256;
a->enc_op_id = VNET_CRYPTO_OP_AES_256_CBC_ENC;
a->dec_op_id = VNET_CRYPTO_OP_AES_256_CBC_DEC;
a->alg = VNET_CRYPTO_ALG_AES_256_CBC;
- a->iv_size = a->block_size = 16;
+ a->iv_size = a->block_align = 16;
a = im->crypto_algs + IPSEC_CRYPTO_ALG_AES_GCM_128;
a->enc_op_id = VNET_CRYPTO_OP_AES_128_GCM_ENC;
a->dec_op_id = VNET_CRYPTO_OP_AES_128_GCM_DEC;
a->alg = VNET_CRYPTO_ALG_AES_128_GCM;
a->iv_size = 8;
- a->block_size = 16;
+ a->block_align = 1;
a->icv_size = 16;
a = im->crypto_algs + IPSEC_CRYPTO_ALG_AES_GCM_192;
a->dec_op_id = VNET_CRYPTO_OP_AES_192_GCM_DEC;
a->alg = VNET_CRYPTO_ALG_AES_192_GCM;
a->iv_size = 8;
- a->block_size = 16;
+ a->block_align = 1;
a->icv_size = 16;
a = im->crypto_algs + IPSEC_CRYPTO_ALG_AES_GCM_256;
a->dec_op_id = VNET_CRYPTO_OP_AES_256_GCM_DEC;
a->alg = VNET_CRYPTO_ALG_AES_256_GCM;
a->iv_size = 8;
- a->block_size = 16;
+ a->block_align = 1;
a->icv_size = 16;
vec_validate (im->integ_algs, IPSEC_INTEG_N_ALG - 1);