Update CRC list and support 21997/7 (Gerrit change ref 91/21991/28)
authorVratko Polak <vrpolak@cisco.com>
Tue, 17 Sep 2019 11:00:39 +0000 (13:00 +0200)
committerVratko Polak <vrpolak@cisco.com>
Tue, 17 Sep 2019 12:30:19 +0000 (12:30 +0000)
- Attempt to repair IPsec LispGpe perf suite.
+ Collection name to reflect the current stable vpp.
+ Add messages found in CSIT L1 keywords.
- Uncommented (instead of deleted) untestable messages. Reasons:
 - Honeycomb.
 - Messages used by unused keywords.
 + Listed reasons. Honeycomb not mentioned if both reasons apply.
+ Delete CRC items for commands not found in keywords anymore.
+ Add CRCs from .json.api files (as teardown is hard to execute).
+ Define and restore alphabetical order.
+ Add hints to find used API commands (not entirely reliable).
+ Move used commands to "cmd = " form so hints find them.
+ Argument to run_cli_command changed from "cmd" to "cli_cmd".
+ Except also struct.error where IOError is excepted.

Change-Id: I61058dbe1e33296908aabd0c13433bb16cfa6adf
Signed-off-by: Vratko Polak <vrpolak@cisco.com>
resources/api/vpp/supported_crcs.yaml
resources/libraries/python/Memif.py
resources/libraries/python/PapiExecutor.py
resources/libraries/python/Trace.py
resources/libraries/python/VPPUtil.py
tests/vpp/perf/crypto/10ge2p1x710-ethip4ipsectptlispgpe-ip4base-aes128cbc-hmac256sha-ndrpdr.robot

index 05c8091..dd68e15 100644 (file)
 # Even with 0x removed, hexa CRC value may appear as decimal number.
 
 # Trailing comments are optional, for tracking how to test the message.
 # Even with 0x removed, hexa CRC value may appear as decimal number.
 
 # Trailing comments are optional, for tracking how to test the message.
+# Please keep alphabetical order.
+# Use bash command "env LC_COLLATE=C sort -u" if not clear.
 
 # https://logs.fd.io/production/vex-yul-rot-jenkins-1
 
 # https://logs.fd.io/production/vex-yul-rot-jenkins-1
-# /vpp-beta-merge-master-ubuntu1804/3566/archives/build-root/
-20.01-rc0~159:
+# /vpp-beta-merge-master-ubuntu1804/3572/archives/build-root/
+20.01-rc0~163:
     acl_add_replace: '0x13bc8539'  # perf
     acl_add_replace_reply: '0xac407b0c'  # perf
     acl_add_replace: '0x13bc8539'  # perf
     acl_add_replace_reply: '0xac407b0c'  # perf
+    acl_details: '0xf89d7a88'  # perf teardown
     acl_dump: '0xef34fea4'  # perf teardown
     acl_dump: '0xef34fea4'  # perf teardown
+    acl_interface_list_details: '0xd5e80809'  # perf teardown
     acl_interface_list_dump: '0x529cb13f'  # perf teardown
     acl_interface_list_dump: '0x529cb13f'  # perf teardown
-    # ^^^^ tc01-64B-1c-ethip4udp-ip4base-iacl1sf-10kflows-mrr
     acl_interface_set_acl_list: '0x8baece38'  # perf
     acl_interface_set_acl_list_reply: '0xe8d4e804'  # perf
     acl_interface_set_acl_list: '0x8baece38'  # perf
     acl_interface_set_acl_list_reply: '0xe8d4e804'  # perf
-    acl_details: '0xf89d7a88'  # perf teardown
-    acl_interface_list_details: '0xd5e80809'  # perf teardown
-    # ^^^^ tc01-64B-1c-ethip4udp-ip4base-iacl1sl-10kflows-mrr
+    # 8x^ tc01-64B-1c-ethip4udp-ip4base-iacl1sf-10kflows-mrr
+    #     tc01-64B-1c-ethip4udp-ip4base-iacl1sl-10kflows-mrr
     # ^^ ip4fwdANDiaclANDacl10AND100_flows
     # ^^ ip4fwdANDiaclANDacl10AND100_flows
-    avf_create: '0xdaab8ae2'  # perf
-    avf_create_reply: '0xfda5941f'  # perf
-    # ^^ tc01-64B-1c-avf-eth-l2bdbasemaclrn-mrr
-    # ^ l2bdmaclrnANDbaseANDdrv_avf
+    avf_create: '0xdaab8ae2'  # dev
+    avf_create_reply: '0xfda5941f'  # dev
     bond_create: '0xb0c98548'  # perf
     bond_create_reply: '0x903324db'  # perf
     bond_enslave: '0xd8bf37b6'  # perf
     bond_enslave_reply: '0xe8d4e804'  # perf
     bond_create: '0xb0c98548'  # perf
     bond_create_reply: '0x903324db'  # perf
     bond_enslave: '0xd8bf37b6'  # perf
     bond_enslave_reply: '0xe8d4e804'  # perf
-    # ^^^^ tc01-64B-1c-1lbvpplacp-dot1q-l2xcbase-eth-2vhostvr1024-1vm-mrr
+    # 4x^ tc01-64B-1c-1lbvpplacp-dot1q-l2xcbase-eth-2vhostvr1024-1vm-mrr
     # ^ l2xcfwdANDlbond_1l
     bridge_domain_add_del: '0xc6360720'  # dev
     bridge_domain_add_del_reply: '0xe8d4e804'  # dev
     # ^ l2xcfwdANDlbond_1l
     bridge_domain_add_del: '0xc6360720'  # dev
     bridge_domain_add_del_reply: '0xe8d4e804'  # dev
+    # bridge_domain_dump / details # honeycomb
     classify_add_del_session: '0x85fd79f4'  # dev
     classify_add_del_session_reply: '0xe8d4e804'  # dev
     classify_add_del_table: '0x9bd794ae'  # dev
     classify_add_del_table_reply: '0x05486349'  # dev
     classify_add_del_session: '0x85fd79f4'  # dev
     classify_add_del_session_reply: '0xe8d4e804'  # dev
     classify_add_del_table: '0x9bd794ae'  # dev
     classify_add_del_table_reply: '0x05486349'  # dev
+    # classify_session_dump / details # honeycomb
+    # classify_table_by_interface / reply # honeycomb
+    # classify_table_info / reply # honeycomb
     cli_inband: '0xf8377302'  # dev setup
     cli_inband_reply: '0x05879051'  # dev setup
     cop_interface_enable_disable: '0x69d24598'  # dev
     cli_inband: '0xf8377302'  # dev setup
     cli_inband_reply: '0x05879051'  # dev setup
     cop_interface_enable_disable: '0x69d24598'  # dev
     create_subif_reply: '0x903324db'  # virl
     create_vhost_user_if: '0xa3438cd4'  # dev
     create_vhost_user_if_reply: '0xfda5941f'  # dev
     create_subif_reply: '0x903324db'  # virl
     create_vhost_user_if: '0xa3438cd4'  # dev
     create_vhost_user_if_reply: '0xfda5941f'  # dev
-    create_vlan_subif: '0xbc534dd3'  # virl
-    create_vlan_subif_reply: '0x903324db'  # virl
+    create_vlan_subif: '0xbc534dd3'  # dev
+    create_vlan_subif_reply: '0x903324db'  # dev
+    # dhcp_proxy_dump / details # honeycomb
     gbp_bridge_domain_add: '0x70f1069c'  # perf
     gbp_bridge_domain_add_reply: '0xe8d4e804'  # perf
     gbp_bridge_domain_add: '0x70f1069c'  # perf
     gbp_bridge_domain_add_reply: '0xe8d4e804'  # perf
-    gbp_route_domain_add: '0x355b67c0'  # perf
-    gbp_route_domain_add_reply: '0xe8d4e804'  # perf
+    gbp_contract_add_del: '0xc64310d2'  # perf
+    gbp_contract_add_del_reply: '0x1992deab'  # perf
     gbp_endpoint_add: '0x6003c704'  # perf
     gbp_endpoint_add_reply: '0x1dd3ff3e'  # perf
     gbp_endpoint_group_add: '0x1031b376'  # perf
     gbp_endpoint_group_add_reply: '0xe8d4e804'  # perf
     gbp_endpoint_add: '0x6003c704'  # perf
     gbp_endpoint_add_reply: '0x1dd3ff3e'  # perf
     gbp_endpoint_group_add: '0x1031b376'  # perf
     gbp_endpoint_group_add_reply: '0xe8d4e804'  # perf
-    gbp_subnet_add_del: '0x4be859ed'  # perf
-    gbp_subnet_add_del_reply: '0xe8d4e804'  # perf
-    gbp_contract_add_del: '0xc64310d2'  # perf
-    gbp_contract_add_del_reply: '0x1992deab'  # perf
     gbp_ext_itf_add_del: '0x6995e85f'  # perf
     gbp_ext_itf_add_del_reply: '0xe8d4e804'  # perf
     gbp_ext_itf_add_del: '0x6995e85f'  # perf
     gbp_ext_itf_add_del_reply: '0xe8d4e804'  # perf
-    # ^^^^^^^^^^^^^^ tc01-64B-1c-avf-dot1q-l2bdbasemaclrn-gbp-mrr
+    gbp_route_domain_add: '0x355b67c0'  # perf
+    gbp_route_domain_add_reply: '0xe8d4e804'  # perf
+    gbp_subnet_add_del: '0x4be859ed'  # perf
+    gbp_subnet_add_del_reply: '0xe8d4e804'  # perf
+    # 14x^ tc01-64B-1c-avf-dot1q-l2bdbasemaclrn-gbp-mrr
     # ^ dot1qANDdrv_avfANDgbp
     # ^ dot1qANDdrv_avfANDgbp
-    gre_tunnel_add_del: '0x04199f47'  # virl
-    gre_tunnel_add_del_reply: '0x903324db'  # virl
     gpe_enable_disable: '0xeb0e943b'  # virl
     gpe_enable_disable_reply: '0xe8d4e804'  # virl
     gpe_enable_disable: '0xeb0e943b'  # virl
     gpe_enable_disable_reply: '0xe8d4e804'  # virl
+    # gre_tunnel_add_del / reply # unused L1 keyword: create_gre_tunnel_interface
     hw_interface_set_mtu: '0xe6c533aa'  # dev
     hw_interface_set_mtu_reply: '0xe8d4e804'  # dev
     input_acl_set_interface: '0xe09537b0'  # dev
     hw_interface_set_mtu: '0xe6c533aa'  # dev
     hw_interface_set_mtu_reply: '0xe8d4e804'  # dev
     input_acl_set_interface: '0xe09537b0'  # dev
     ip_probe_neighbor_reply: '0xe8d4e804'  # virl
     ip_route_add_del: '0x83e086ce'  # dev
     ip_route_add_del_reply: '0x1992deab'  # dev
     ip_probe_neighbor_reply: '0xe8d4e804'  # virl
     ip_route_add_del: '0x83e086ce'  # dev
     ip_route_add_del_reply: '0x1992deab'  # dev
-    ip_source_check_interface_add_del: '0x0a60152a'  # virl
-    ip_source_check_interface_add_del_reply: '0xe8d4e804'  # virl
+    # ip_source_check_interface_add_del / reply # unused L1 keyword: vpp_ip_source_check_setup
     ip_table_add_del: '0xe5d378f2'  # dev
     ip_table_add_del_reply: '0xe8d4e804'  # dev
     ipsec_interface_add_del_spd: '0x1e3b8286'  # dev
     ipsec_interface_add_del_spd_reply: '0xe8d4e804'  # dev
     ipsec_sad_entry_add_del: '0xa25ab61e'  # dev
     ipsec_sad_entry_add_del_reply: '0x9ffac24b'  # dev
     ip_table_add_del: '0xe5d378f2'  # dev
     ip_table_add_del_reply: '0xe8d4e804'  # dev
     ipsec_interface_add_del_spd: '0x1e3b8286'  # dev
     ipsec_interface_add_del_spd_reply: '0xe8d4e804'  # dev
     ipsec_sad_entry_add_del: '0xa25ab61e'  # dev
     ipsec_sad_entry_add_del_reply: '0x9ffac24b'  # dev
+    ipsec_select_backend: '0x4fd24836'  # perf
+    ipsec_select_backend_reply: '0xe8d4e804'  # perf
+    # ^^ tc01-64B-1c-ethip4ipsec1tnlhw-ip4base-policy-aes256gcm-mrr
+    # ^ nic_intel-xl710ANDipsechwANDtnl_1ANDaes_256_gcm
     ipsec_spd_add_del: '0x9ffdf5da'  # dev
     ipsec_spd_add_del_reply: '0xe8d4e804'  # dev
     ipsec_spd_entry_add_del: '0x6bc6a3b5'  # dev
     ipsec_spd_entry_add_del_reply: '0x9ffac24b'  # dev
     ipsec_tunnel_if_add_del: '0xaa539b47'  # perf
     ipsec_tunnel_if_add_del_reply: '0xfda5941f'  # perf
     ipsec_spd_add_del: '0x9ffdf5da'  # dev
     ipsec_spd_add_del_reply: '0xe8d4e804'  # dev
     ipsec_spd_entry_add_del: '0x6bc6a3b5'  # dev
     ipsec_spd_entry_add_del_reply: '0x9ffac24b'  # dev
     ipsec_tunnel_if_add_del: '0xaa539b47'  # perf
     ipsec_tunnel_if_add_del_reply: '0xfda5941f'  # perf
-    # ^^ tc01-64B-1c-ethip4ipsec1tnlsw-ip4base-int-aes256gcm-mrr
-    # ^ ipsecswANDipsecintANDaes_256_gcmANDtnl_1
-    l2_interface_vlan_tag_rewrite: '0xb90be6b4'  # virl
-    l2_interface_vlan_tag_rewrite_reply: '0xe8d4e804'  # virl
+    # ^^ tc01-64B-1c-ethip4ipsec1tnlhw-ip4base-int-aes256gcm-mrr
+    # ^ See select_backend, the tag expression selects both -int- and -policy-.
+    # l2_fib_table_dump / details # honeycomb
+    l2_interface_vlan_tag_rewrite: '0xb90be6b4'  # dev
+    l2_interface_vlan_tag_rewrite_reply: '0xe8d4e804'  # dev
     l2_patch_add_del: '0x62506e63'  # perf
     l2_patch_add_del_reply: '0xe8d4e804'  # perf
     # ^^ tc01-64B-1c-avf-eth-l2patch-mrr
     # ^ l2patchANDdrv_avf
     l2_patch_add_del: '0x62506e63'  # perf
     l2_patch_add_del_reply: '0xe8d4e804'  # perf
     # ^^ tc01-64B-1c-avf-eth-l2patch-mrr
     # ^ l2patchANDdrv_avf
+    # l2fib_add_del / reply # unused L1 keyword: vpp_add_l2fib_entry
     lisp_add_del_adjacency: '0xf047390d'  # virl
     lisp_add_del_adjacency_reply: '0xe8d4e804'  # virl
     lisp_add_del_local_eid: '0xe6d00717'  # virl
     lisp_add_del_adjacency: '0xf047390d'  # virl
     lisp_add_del_adjacency_reply: '0xe8d4e804'  # virl
     lisp_add_del_local_eid: '0xe6d00717'  # virl
     lisp_add_del_locator_reply: '0xe8d4e804'  # virl
     lisp_add_del_locator_set: '0x06968e38'  # virl
     lisp_add_del_locator_set_reply: '0xb6666db4'  # virl
     lisp_add_del_locator_reply: '0xe8d4e804'  # virl
     lisp_add_del_locator_set: '0x06968e38'  # virl
     lisp_add_del_locator_set_reply: '0xb6666db4'  # virl
+    # lisp_add_del_map_resolver / reply # unused L2 keyword: Configure LISP map resolver address
     lisp_add_del_remote_mapping: '0xb879c3a9'  # virl
     lisp_add_del_remote_mapping_reply: '0xe8d4e804'  # virl
     lisp_add_del_remote_mapping: '0xb879c3a9'  # virl
     lisp_add_del_remote_mapping_reply: '0xe8d4e804'  # virl
-    lisp_eid_table_details: '0xdcd9f414'  # virl
-    lisp_eid_table_dump: '0xe0df64da'  # virl
+    lisp_eid_table_add_del_map: '0x59e9975e'  # perf
+    lisp_eid_table_add_del_map_reply: '0xe8d4e804'  # perf
+    # ^^ tc01-64B-1c-ethip4ipsectptlispgpe-ip4base-aes128cbc-hmac256sha-ndrpdr
+    # ^ ipsecANDlispgpe
+    # lisp_eid_table_dump / details # unused L2 keyword: LISP eid address should be set correctly to eid table
     lisp_enable_disable: '0xeb0e943b'  # virl
     lisp_enable_disable_reply: '0xe8d4e804'  # virl
     lisp_enable_disable: '0xeb0e943b'  # virl
     lisp_enable_disable_reply: '0xe8d4e804'  # virl
-    lisp_locator_set_details: '0x6b846882'  # virl
-    lisp_locator_set_dump: '0xc79e8ab0'  # virl
-    lisp_map_resolver_details: '0x60a5f5ca'  # virl
-    lisp_map_resolver_dump: '0x51077d14'  # virl
+    # lisp_locator_set_dump / details # unused L2 keyword: LISP locator_set should be configured correctly
+    # lisp_map_server_dump / details # honeycomb
+    # lisp_map_resolver_dump / details # unused L2 keyword: LISP map resolver address should be configured correctly
     macip_acl_add: '0x0c680ca5'  # perf
     macip_acl_add_reply: '0xac407b0c'  # perf
     macip_acl_add: '0x0c680ca5'  # perf
     macip_acl_add_reply: '0xac407b0c'  # perf
-    macip_acl_details: '0xe164e69a'  # perf
-    macip_acl_dump: '0xef34fea4'  # perf
+    macip_acl_details: '0xe164e69a'  # perf teardown
+    macip_acl_dump: '0xef34fea4'  # perf teardown
     macip_acl_interface_add_del: '0x6a6be97c'  # perf
     macip_acl_interface_add_del_reply: '0xe8d4e804'  # perf
     macip_acl_interface_add_del: '0x6a6be97c'  # perf
     macip_acl_interface_add_del_reply: '0xe8d4e804'  # perf
-    macip_acl_interface_get: '0x51077d14'  # perf
-    macip_acl_interface_get_reply: '0xaccf9b05'  # perf
-    # ^^^^^^^^ tc01-64B-1c-eth-l2bdbasemaclrn-macip-iacl1sl-100flows-mrr
+    macip_acl_interface_get: '0x51077d14'  # perf teardown
+    macip_acl_interface_get_reply: '0xaccf9b05'  # perf teardown
+    # 8x^ tc01-64B-1c-eth-l2bdbasemaclrn-macip-iacl1sl-100flows-mrr
     # ^ macipANDacl1AND100_flows
     memif_create: '0xba720d42'  # dev
     memif_create_reply: '0x903324db'  # dev
     # ^ macipANDacl1AND100_flows
     memif_create: '0xba720d42'  # dev
     memif_create_reply: '0x903324db'  # dev
     memif_dump: '0x51077d14'  # dev
     memif_socket_filename_add_del: '0xa2ce1a10'  # dev
     memif_socket_filename_add_del_reply: '0xe8d4e804'  # dev
     memif_dump: '0x51077d14'  # dev
     memif_socket_filename_add_del: '0xa2ce1a10'  # dev
     memif_socket_filename_add_del_reply: '0xe8d4e804'  # dev
-    nat_det_add_del_map: '0x04b76549'  # perf
-    nat_det_add_del_map_reply: '0xe8d4e804'  # perf
     nat44_interface_add_del_feature: '0xef3edad1'  # perf
     nat44_interface_add_del_feature_reply: '0xe8d4e804'  # perf
     nat44_interface_add_del_feature: '0xef3edad1'  # perf
     nat44_interface_add_del_feature_reply: '0xe8d4e804'  # perf
-    # ^^^^ tc01-64B-1c-ethip4udp-ip4base-nat44-mrr
+    nat_det_add_del_map: '0x04b76549'  # perf
+    nat_det_add_del_map_reply: '0xe8d4e804'  # perf
+    nat_show_config: '0x51077d14'  # perf teardown
+    nat_show_config_reply: '0x006a0786'  # perf teardown
+    # 6x^ tc01-64B-1c-ethip4udp-ip4base-nat44-mrr
     # ^ nat44NOTscaleNOTsrc_user_1
     # ^ nat44NOTscaleNOTsrc_user_1
-    proxy_arp_intfc_enable_disable: '0x69d24598'  # virl
-    proxy_arp_intfc_enable_disable_reply: '0xe8d4e804'  # virl
-    show_lisp_status: '0x51077d14'  # virl
-    show_lisp_status_reply: '0xddcf48ef'  # virl
+    # show_lisp_map_register_state / reply # honeycomb
+    # show_lisp_map_request_mode / reply # honeycomb
+    # show_lisp_pitr / reply # honeycomb
+    # show_lisp_rloc_probe_state / reply # honeycomb
+    # show_lisp_status / reply # unused L2 keywords: LISP should be enabled / disabled
+    # show_lisp_use_petr / reply # honeycomb
     show_threads: '0x51077d14'  # dev
     show_threads_reply: '0xf5e0b66f'  # dev
     show_version: '0x51077d14'  # dev setup
     show_version_reply: '0xc919bde1'  # dev setup
     sr_localsid_add_del: '0xb30489eb'  # perf
     sr_localsid_add_del_reply: '0xe8d4e804'  # perf
     show_threads: '0x51077d14'  # dev
     show_threads_reply: '0xf5e0b66f'  # dev
     show_version: '0x51077d14'  # dev setup
     show_version_reply: '0xc919bde1'  # dev setup
     sr_localsid_add_del: '0xb30489eb'  # perf
     sr_localsid_add_del_reply: '0xe8d4e804'  # perf
-    sr_localsids_details: '0x0791babc'  # perf
-    sr_localsids_dump: '0x51077d14'  # perf
-    sr_policies_details: '0x5087f460'  # perf
-    sr_policies_dump: '0x51077d14'  # perf
+    sr_localsids_details: '0x0791babc'  # perf teardown
+    sr_localsids_dump: '0x51077d14'  # perf teardown
+    sr_policies_details: '0x5087f460'  # perf teardown
+    sr_policies_dump: '0x51077d14'  # perf teardown
     sr_policy_add: '0x4b6e2484'  # perf
     sr_policy_add_reply: '0xe8d4e804'  # perf
     sr_set_encap_source: '0xd05bb4de'  # perf
     sr_set_encap_source_reply: '0xe8d4e804'  # perf
     sr_steering_add_del: '0x28b5dcab'  # perf
     sr_steering_add_del_reply: '0xe8d4e804'  # perf
     sr_policy_add: '0x4b6e2484'  # perf
     sr_policy_add_reply: '0xe8d4e804'  # perf
     sr_set_encap_source: '0xd05bb4de'  # perf
     sr_set_encap_source_reply: '0xe8d4e804'  # perf
     sr_steering_add_del: '0x28b5dcab'  # perf
     sr_steering_add_del_reply: '0xe8d4e804'  # perf
-    sr_steering_pol_details: '0x5627d483'  # perf
-    sr_steering_pol_dump: '0x51077d14'  # perf
+    sr_steering_pol_details: '0x5627d483'  # perf teardown
+    sr_steering_pol_dump: '0x51077d14'  # perf teardown
     # x^ tc01-78B-1c-ethip6ip6-ip6base-srv6enc1sid-mrr
     # ^ srv6_1sid
     sw_interface_add_del_address: '0x43a487bd'  # dev
     # x^ tc01-78B-1c-ethip6ip6-ip6base-srv6enc1sid-mrr
     # ^ srv6_1sid
     sw_interface_add_del_address: '0x43a487bd'  # dev
     # ^^ see bond_*
     sw_interface_details: '0xe67d02b6'  # dev setup
     sw_interface_dump: '0x4ecaa564'  # dev setup
     # ^^ see bond_*
     sw_interface_details: '0xe67d02b6'  # dev setup
     sw_interface_dump: '0x4ecaa564'  # dev setup
+    # sw_interface_get_table / reply # honeycomb
     sw_interface_ip6nd_ra_config: '0xc3f02daa'  # dev
     sw_interface_ip6nd_ra_config_reply: '0xe8d4e804'  # dev
     sw_interface_rx_placement_details: '0x93d413ec'  # perf
     sw_interface_ip6nd_ra_config: '0xc3f02daa'  # dev
     sw_interface_ip6nd_ra_config_reply: '0xe8d4e804'  # dev
     sw_interface_rx_placement_details: '0x93d413ec'  # perf
     sw_interface_set_l2_xconnect_reply: '0xe8d4e804'  # dev
     sw_interface_set_rx_placement: '0xe462d0d9'  # perf
     sw_interface_set_rx_placement_reply: '0xe8d4e804'  # perf
     sw_interface_set_l2_xconnect_reply: '0xe8d4e804'  # dev
     sw_interface_set_rx_placement: '0xe462d0d9'  # perf
     sw_interface_set_rx_placement_reply: '0xe8d4e804'  # perf
-    # ^^ tc01-64B-1c-eth-l2xcbase-eth-2memif-1dcr-mrr
-    # ^ l2xcfwdANDbaseANDlxcANDmemif
+    # ^^ see tc01-64B-1c-dot1q-l2bdbasemaclrn-eth-2memif-1dcr-mrr above
     sw_interface_set_table: '0x2d412a82'  # dev
     sw_interface_set_table_reply: '0xe8d4e804'  # dev
     sw_interface_set_unnumbered: '0x61e660a5'  # perf
     sw_interface_set_table: '0x2d412a82'  # dev
     sw_interface_set_table_reply: '0xe8d4e804'  # dev
     sw_interface_set_unnumbered: '0x61e660a5'  # perf
     sw_interface_slave_details: '0xe65656d2'  # perf
     sw_interface_slave_dump: '0xd85aab0d'  # perf
     # ^^ see bond_*
     sw_interface_slave_details: '0xe65656d2'  # perf
     sw_interface_slave_dump: '0xd85aab0d'  # perf
     # ^^ see bond_*
+    # sw_interface_span_dump / details # honeycomb
     sw_interface_tap_v2_dump: '0x51077d14'  # dev
     sw_interface_tap_v2_details: '0x5ee87a5f'  # dev
     sw_interface_vhost_user_details: '0x91ff3307'  # dev
     sw_interface_vhost_user_dump: '0x51077d14'  # dev
     tap_create_v2: '0x8fa99320'  # dev
     tap_create_v2_reply: '0xfda5941f'  # dev
     sw_interface_tap_v2_dump: '0x51077d14'  # dev
     sw_interface_tap_v2_details: '0x5ee87a5f'  # dev
     sw_interface_vhost_user_details: '0x91ff3307'  # dev
     sw_interface_vhost_user_dump: '0x51077d14'  # dev
     tap_create_v2: '0x8fa99320'  # dev
     tap_create_v2_reply: '0xfda5941f'  # dev
-    vxlan_add_del_tunnel: '0x00f4bdd0'  # virl
-    vxlan_add_del_tunnel_reply: '0xfda5941f'  # virl
-    vxlan_tunnel_details: '0xce38e127'  # virl
-    vxlan_tunnel_dump: '0x529cb13f'  # virl
+    vxlan_add_del_tunnel: '0x00f4bdd0'  # dev
+    vxlan_add_del_tunnel_reply: '0xfda5941f'  # dev
+    # vxlan_gpe_tunnel_dump / details # honeycomb
+    # vxlan_tunnel_dump / details # unused L2 keyword: Get VXLAN dump
 
 
 # https://gerrit.fd.io/r/c/vpp/+/21997
 
 
 # https://gerrit.fd.io/r/c/vpp/+/21997
-21997/6:
+21997/7:
     acl_add_replace: '0x13bc8539'  # perf
     acl_add_replace_reply: '0xac407b0c'  # perf
     acl_add_replace: '0x13bc8539'  # perf
     acl_add_replace_reply: '0xac407b0c'  # perf
+    acl_details: '0xf89d7a88'  # perf teardown
     acl_dump: '0xef34fea4'  # perf teardown
     acl_dump: '0xef34fea4'  # perf teardown
+    acl_interface_list_details: '0xd5e80809'  # perf teardown
     acl_interface_list_dump: '0x529cb13f'  # perf teardown
     acl_interface_list_dump: '0x529cb13f'  # perf teardown
-    # ^^^^ tc01-64B-1c-ethip4udp-ip4base-iacl1sf-10kflows-mrr
     acl_interface_set_acl_list: '0x8baece38'  # perf
     acl_interface_set_acl_list_reply: '0xe8d4e804'  # perf
     acl_interface_set_acl_list: '0x8baece38'  # perf
     acl_interface_set_acl_list_reply: '0xe8d4e804'  # perf
-    acl_details: '0xf89d7a88'  # perf teardown
-    acl_interface_list_details: '0xd5e80809'  # perf teardown
-    # ^^^^ tc01-64B-1c-ethip4udp-ip4base-iacl1sl-10kflows-mrr
+    # 8x^ tc01-64B-1c-ethip4udp-ip4base-iacl1sf-10kflows-mrr
+    #     tc01-64B-1c-ethip4udp-ip4base-iacl1sl-10kflows-mrr
     # ^^ ip4fwdANDiaclANDacl10AND100_flows
     # ^^ ip4fwdANDiaclANDacl10AND100_flows
-    avf_create: '0xdaab8ae2'  # perf
-    avf_create_reply: '0xfda5941f'  # perf
-    # ^^ tc01-64B-1c-avf-eth-l2bdbasemaclrn-mrr
-    # ^ l2bdmaclrnANDbaseANDdrv_avf
-    bond_create: '0xf19b4ad0'  # perf
-    bond_create_reply: '0xfda5941f'  # perf
-    bond_enslave: '0x0ded34f6'  # perf
+    avf_create: '0xdaab8ae2'  # dev
+    avf_create_reply: '0xfda5941f'  # dev
+    bond_create: '0x48883c7e'  # perf
+    bond_create_reply: '0x5383d31f'  # perf
+    bond_enslave: '0x076ecfa7'  # perf
     bond_enslave_reply: '0xe8d4e804'  # perf
     bond_enslave_reply: '0xe8d4e804'  # perf
-    # ^^^^ tc01-64B-1c-1lbvpplacp-dot1q-l2xcbase-eth-2vhostvr1024-1vm-mrr
+    # 4x^ tc01-64B-1c-1lbvpplacp-dot1q-l2xcbase-eth-2vhostvr1024-1vm-mrr
     # ^ l2xcfwdANDlbond_1l
     bridge_domain_add_del: '0xc6360720'  # dev
     bridge_domain_add_del_reply: '0xe8d4e804'  # dev
     # ^ l2xcfwdANDlbond_1l
     bridge_domain_add_del: '0xc6360720'  # dev
     bridge_domain_add_del_reply: '0xe8d4e804'  # dev
+    # bridge_domain_dump / details # honeycomb
     classify_add_del_session: '0x85fd79f4'  # dev
     classify_add_del_session_reply: '0xe8d4e804'  # dev
     classify_add_del_table: '0x9bd794ae'  # dev
     classify_add_del_table_reply: '0x05486349'  # dev
     classify_add_del_session: '0x85fd79f4'  # dev
     classify_add_del_session_reply: '0xe8d4e804'  # dev
     classify_add_del_table: '0x9bd794ae'  # dev
     classify_add_del_table_reply: '0x05486349'  # dev
+    # classify_session_dump / details # honeycomb
+    # classify_table_by_interface / reply # honeycomb
+    # classify_table_info / reply # honeycomb
     cli_inband: '0xf8377302'  # dev setup
     cli_inband_reply: '0x05879051'  # dev setup
     cop_interface_enable_disable: '0x69d24598'  # dev
     cli_inband: '0xf8377302'  # dev setup
     cli_inband_reply: '0x05879051'  # dev setup
     cop_interface_enable_disable: '0x69d24598'  # dev
     create_subif_reply: '0x5383d31f'  # virl
     create_vhost_user_if: '0xa3438cd4'  # dev
     create_vhost_user_if_reply: '0xfda5941f'  # dev
     create_subif_reply: '0x5383d31f'  # virl
     create_vhost_user_if: '0xa3438cd4'  # dev
     create_vhost_user_if_reply: '0xfda5941f'  # dev
-    create_vlan_subif: '0xaf34ac8b'  # virl
-    create_vlan_subif_reply: '0x5383d31f'  # virl
+    create_vlan_subif: '0xaf34ac8b'  # dev
+    create_vlan_subif_reply: '0x5383d31f'  # dev
+    # dhcp_proxy_dump / details # honeycomb
     gbp_bridge_domain_add: '0x70f1069c'  # perf
     gbp_bridge_domain_add_reply: '0xe8d4e804'  # perf
     gbp_contract_add_del: '0x5b63d90a'  # perf
     gbp_bridge_domain_add: '0x70f1069c'  # perf
     gbp_bridge_domain_add_reply: '0xe8d4e804'  # perf
     gbp_contract_add_del: '0x5b63d90a'  # perf
     gbp_route_domain_add_reply: '0xe8d4e804'  # perf
     gbp_subnet_add_del: '0xf42b9430'  # perf
     gbp_subnet_add_del_reply: '0xe8d4e804'  # perf
     gbp_route_domain_add_reply: '0xe8d4e804'  # perf
     gbp_subnet_add_del: '0xf42b9430'  # perf
     gbp_subnet_add_del_reply: '0xe8d4e804'  # perf
-    # ^^^^^^^^^^^^^^ tc01-64B-1c-avf-dot1q-l2bdbasemaclrn-gbp-mrr
+    # 14x^ tc01-64B-1c-avf-dot1q-l2bdbasemaclrn-gbp-mrr
     # ^ dot1qANDdrv_avfANDgbp
     # ^ dot1qANDdrv_avfANDgbp
-    gre_tunnel_add_del: '0x4bf7bdec'  # virl
-    gre_tunnel_add_del_reply: '0x5383d31f'  # virl
     gpe_enable_disable: '0xeb0e943b'  # virl
     gpe_enable_disable_reply: '0xe8d4e804'  # virl
     gpe_enable_disable: '0xeb0e943b'  # virl
     gpe_enable_disable_reply: '0xe8d4e804'  # virl
+    # gre_tunnel_add_del / reply # unused L1 keyword: create_gre_tunnel_interface
     hw_interface_set_mtu: '0xe6746899'  # dev
     hw_interface_set_mtu_reply: '0xe8d4e804'  # dev
     input_acl_set_interface: '0xe09537b0'  # dev
     hw_interface_set_mtu: '0xe6746899'  # dev
     hw_interface_set_mtu_reply: '0xe8d4e804'  # dev
     input_acl_set_interface: '0xe09537b0'  # dev
     ip_probe_neighbor_reply: '0xe8d4e804'  # virl
     ip_route_add_del: '0x5ceee41c'  # dev
     ip_route_add_del_reply: '0x1992deab'  # dev
     ip_probe_neighbor_reply: '0xe8d4e804'  # virl
     ip_route_add_del: '0x5ceee41c'  # dev
     ip_route_add_del_reply: '0x1992deab'  # dev
-    ip_source_check_interface_add_del: '0x0a60152a'  # virl
-    ip_source_check_interface_add_del_reply: '0xe8d4e804'  # virl
+    # ip_source_check_interface_add_del / reply # unused L1 keyword: vpp_ip_source_check_setup
     ip_table_add_del: '0xe5d378f2'  # dev
     ip_table_add_del_reply: '0xe8d4e804'  # dev
     ipsec_interface_add_del_spd: '0x1e3b8286'  # dev
     ipsec_interface_add_del_spd_reply: '0xe8d4e804'  # dev
     ipsec_sad_entry_add_del: '0xa25ab61e'  # dev
     ipsec_sad_entry_add_del_reply: '0x9ffac24b'  # dev
     ip_table_add_del: '0xe5d378f2'  # dev
     ip_table_add_del_reply: '0xe8d4e804'  # dev
     ipsec_interface_add_del_spd: '0x1e3b8286'  # dev
     ipsec_interface_add_del_spd_reply: '0xe8d4e804'  # dev
     ipsec_sad_entry_add_del: '0xa25ab61e'  # dev
     ipsec_sad_entry_add_del_reply: '0x9ffac24b'  # dev
+    ipsec_select_backend: '0x4fd24836'  # perf
+    ipsec_select_backend_reply: '0xe8d4e804'  # perf
+    # ^^ tc01-64B-1c-ethip4ipsec1tnlhw-ip4base-policy-aes256gcm-mrr
+    # ^ nic_intel-xl710ANDipsechwANDtnl_1ANDaes_256_gcm
     ipsec_spd_add_del: '0x9ffdf5da'  # dev
     ipsec_spd_add_del_reply: '0xe8d4e804'  # dev
     ipsec_spd_entry_add_del: '0xdb217840'  # dev
     ipsec_spd_entry_add_del_reply: '0x9ffac24b'  # dev
     ipsec_tunnel_if_add_del: '0xd5a98274'  # perf
     ipsec_tunnel_if_add_del_reply: '0xfda5941f'  # perf
     ipsec_spd_add_del: '0x9ffdf5da'  # dev
     ipsec_spd_add_del_reply: '0xe8d4e804'  # dev
     ipsec_spd_entry_add_del: '0xdb217840'  # dev
     ipsec_spd_entry_add_del_reply: '0x9ffac24b'  # dev
     ipsec_tunnel_if_add_del: '0xd5a98274'  # perf
     ipsec_tunnel_if_add_del_reply: '0xfda5941f'  # perf
-    # ^^ tc01-64B-1c-ethip4ipsec1tnlsw-ip4base-int-aes256gcm-mrr
-    # ^ ipsecswANDipsecintANDaes_256_gcmANDtnl_1
-    l2_interface_vlan_tag_rewrite: '0xb90be6b4'  # virl
-    l2_interface_vlan_tag_rewrite_reply: '0xe8d4e804'  # virl
+    # ^^ tc01-64B-1c-ethip4ipsec1tnlhw-ip4base-int-aes256gcm-mrr
+    # ^ See select_backend, the tag expression selects both -int- and -policy-.
+    # l2_fib_table_dump / details # honeycomb
+    l2_interface_vlan_tag_rewrite: '0xb90be6b4'  # dev
+    l2_interface_vlan_tag_rewrite_reply: '0xe8d4e804'  # dev
     l2_patch_add_del: '0x62506e63'  # perf
     l2_patch_add_del_reply: '0xe8d4e804'  # perf
     # ^^ tc01-64B-1c-avf-eth-l2patch-mrr
     # ^ l2patchANDdrv_avf
     l2_patch_add_del: '0x62506e63'  # perf
     l2_patch_add_del_reply: '0xe8d4e804'  # perf
     # ^^ tc01-64B-1c-avf-eth-l2patch-mrr
     # ^ l2patchANDdrv_avf
+    # l2fib_add_del / reply # unused L1 keyword: vpp_add_l2fib_entry
     lisp_add_del_adjacency: '0xf047390d'  # virl
     lisp_add_del_adjacency_reply: '0xe8d4e804'  # virl
     lisp_add_del_local_eid: '0xe6d00717'  # virl
     lisp_add_del_adjacency: '0xf047390d'  # virl
     lisp_add_del_adjacency_reply: '0xe8d4e804'  # virl
     lisp_add_del_local_eid: '0xe6d00717'  # virl
     lisp_add_del_locator_reply: '0xe8d4e804'  # virl
     lisp_add_del_locator_set: '0x06968e38'  # virl
     lisp_add_del_locator_set_reply: '0xb6666db4'  # virl
     lisp_add_del_locator_reply: '0xe8d4e804'  # virl
     lisp_add_del_locator_set: '0x06968e38'  # virl
     lisp_add_del_locator_set_reply: '0xb6666db4'  # virl
+    # lisp_add_del_map_resolver / reply # unused L2 keyword: Configure LISP map resolver address
     lisp_add_del_remote_mapping: '0xb879c3a9'  # virl
     lisp_add_del_remote_mapping_reply: '0xe8d4e804'  # virl
     lisp_add_del_remote_mapping: '0xb879c3a9'  # virl
     lisp_add_del_remote_mapping_reply: '0xe8d4e804'  # virl
-    lisp_eid_table_details: '0xdcd9f414'  # virl
-    lisp_eid_table_dump: '0xe0df64da'  # virl
+    lisp_eid_table_add_del_map: '0x59e9975e'  # perf
+    lisp_eid_table_add_del_map_reply: '0xe8d4e804'  # perf
+    # ^^ tc01-64B-1c-ethip4ipsectptlispgpe-ip4base-aes128cbc-hmac256sha-ndrpdr
+    # ^ ipsecANDlispgpe
+    # lisp_eid_table_dump / details # unused L2 keyword: LISP eid address should be set correctly to eid table
     lisp_enable_disable: '0xeb0e943b'  # virl
     lisp_enable_disable_reply: '0xe8d4e804'  # virl
     lisp_enable_disable: '0xeb0e943b'  # virl
     lisp_enable_disable_reply: '0xe8d4e804'  # virl
-    lisp_locator_set_details: '0x6b846882'  # virl
-    lisp_locator_set_dump: '0xc79e8ab0'  # virl
-    lisp_map_resolver_details: '0x60a5f5ca'  # virl
-    lisp_map_resolver_dump: '0x51077d14'  # virl
+    # lisp_locator_set_dump / details # unused L2 keyword: LISP locator_set should be configured correctly
+    # lisp_map_server_dump / details # honeycomb
+    # lisp_map_resolver_dump / details # unused L2 keyword: LISP map resolver address should be configured correctly
     macip_acl_add: '0x0c680ca5'  # perf
     macip_acl_add_reply: '0xac407b0c'  # perf
     macip_acl_add: '0x0c680ca5'  # perf
     macip_acl_add_reply: '0xac407b0c'  # perf
-    macip_acl_details: '0xe164e69a'  # perf
-    macip_acl_dump: '0xef34fea4'  # perf
+    macip_acl_details: '0xe164e69a'  # perf teardown
+    macip_acl_dump: '0xef34fea4'  # perf teardown
     macip_acl_interface_add_del: '0x6a6be97c'  # perf
     macip_acl_interface_add_del_reply: '0xe8d4e804'  # perf
     macip_acl_interface_add_del: '0x6a6be97c'  # perf
     macip_acl_interface_add_del_reply: '0xe8d4e804'  # perf
-    macip_acl_interface_get: '0x51077d14'  # perf
-    macip_acl_interface_get_reply: '0xaccf9b05'  # perf
-    # ^^^^^^^^ tc01-64B-1c-eth-l2bdbasemaclrn-macip-iacl1sl-100flows-mrr
+    macip_acl_interface_get: '0x51077d14'  # perf teardown
+    macip_acl_interface_get_reply: '0xaccf9b05'  # perf teardown
+    # 8x^ tc01-64B-1c-eth-l2bdbasemaclrn-macip-iacl1sl-100flows-mrr
     # ^ macipANDacl1AND100_flows
     memif_create: '0xb1b25061'  # dev
     memif_create_reply: '0x5383d31f'  # dev
     # ^ macipANDacl1AND100_flows
     memif_create: '0xb1b25061'  # dev
     memif_create_reply: '0x5383d31f'  # dev
     nat44_interface_add_del_feature_reply: '0xe8d4e804'  # perf
     nat_det_add_del_map: '0x112fde05'  # perf
     nat_det_add_del_map_reply: '0xe8d4e804'  # perf
     nat44_interface_add_del_feature_reply: '0xe8d4e804'  # perf
     nat_det_add_del_map: '0x112fde05'  # perf
     nat_det_add_del_map_reply: '0xe8d4e804'  # perf
-    # ^^^^ tc01-64B-1c-ethip4udp-ip4base-nat44-mrr
+    nat_show_config: '0x51077d14'  # perf teardown
+    nat_show_config_reply: '0x006a0786'  # perf teardown
+    # 6x^ tc01-64B-1c-ethip4udp-ip4base-nat44-mrr
     # ^ nat44NOTscaleNOTsrc_user_1
     # ^ nat44NOTscaleNOTsrc_user_1
-    proxy_arp_intfc_enable_disable: '0x69d24598'  # virl
-    proxy_arp_intfc_enable_disable_reply: '0xe8d4e804'  # virl
-    show_lisp_status: '0x51077d14'  # virl
-    show_lisp_status_reply: '0xddcf48ef'  # virl
+    # show_lisp_map_register_state / reply # honeycomb
+    # show_lisp_map_request_mode / reply # honeycomb
+    # show_lisp_pitr / reply # honeycomb
+    # show_lisp_rloc_probe_state / reply # honeycomb
+    # show_lisp_status / reply # unused L2 keywords: LISP should be enabled / disabled
+    # show_lisp_use_petr / reply # honeycomb
     show_threads: '0x51077d14'  # dev
     show_threads_reply: '0xf5e0b66f'  # dev
     show_version: '0x51077d14'  # dev setup
     show_version_reply: '0xc919bde1'  # dev setup
     sr_localsid_add_del: '0xb30489eb'  # perf
     sr_localsid_add_del_reply: '0xe8d4e804'  # perf
     show_threads: '0x51077d14'  # dev
     show_threads_reply: '0xf5e0b66f'  # dev
     show_version: '0x51077d14'  # dev setup
     show_version_reply: '0xc919bde1'  # dev setup
     sr_localsid_add_del: '0xb30489eb'  # perf
     sr_localsid_add_del_reply: '0xe8d4e804'  # perf
-    sr_localsids_details: '0x0791babc'  # perf
-    sr_localsids_dump: '0x51077d14'  # perf
-    sr_policies_details: '0x5087f460'  # perf
-    sr_policies_dump: '0x51077d14'  # perf
+    sr_localsids_details: '0x0791babc'  # perf teardown
+    sr_localsids_dump: '0x51077d14'  # perf teardown
+    sr_policies_details: '0x5087f460'  # perf teardown
+    sr_policies_dump: '0x51077d14'  # perf teardown
     sr_policy_add: '0x4b6e2484'  # perf
     sr_policy_add_reply: '0xe8d4e804'  # perf
     sr_set_encap_source: '0xd05bb4de'  # perf
     sr_set_encap_source_reply: '0xe8d4e804'  # perf
     sr_steering_add_del: '0x28b5dcab'  # perf
     sr_steering_add_del_reply: '0xe8d4e804'  # perf
     sr_policy_add: '0x4b6e2484'  # perf
     sr_policy_add_reply: '0xe8d4e804'  # perf
     sr_set_encap_source: '0xd05bb4de'  # perf
     sr_set_encap_source_reply: '0xe8d4e804'  # perf
     sr_steering_add_del: '0x28b5dcab'  # perf
     sr_steering_add_del_reply: '0xe8d4e804'  # perf
-    sr_steering_pol_details: '0x5627d483'  # perf
-    sr_steering_pol_dump: '0x51077d14'  # perf
+    sr_steering_pol_details: '0x5627d483'  # perf teardown
+    sr_steering_pol_dump: '0x51077d14'  # perf teardown
     # x^ tc01-78B-1c-ethip6ip6-ip6base-srv6enc1sid-mrr
     # ^ srv6_1sid
     sw_interface_add_del_address: '0x5803d5c4'  # dev
     sw_interface_add_del_address_reply: '0xe8d4e804'  # dev
     # x^ tc01-78B-1c-ethip6ip6-ip6base-srv6enc1sid-mrr
     # ^ srv6_1sid
     sw_interface_add_del_address: '0x5803d5c4'  # dev
     sw_interface_add_del_address_reply: '0xe8d4e804'  # dev
-    sw_interface_bond_details: '0xa8ac5a5f'  # perf
+    sw_interface_bond_details: '0xf5ef2106'  # perf
     sw_interface_bond_dump: '0x51077d14'  # perf
     # ^^ see bond_*
     sw_interface_details: '0x077403e2'  # dev setup
     sw_interface_dump: '0xaa610c27'  # dev setup
     sw_interface_bond_dump: '0x51077d14'  # perf
     # ^^ see bond_*
     sw_interface_details: '0x077403e2'  # dev setup
     sw_interface_dump: '0xaa610c27'  # dev setup
+    # sw_interface_get_table / reply # honeycomb
     sw_interface_ip6nd_ra_config: '0xc3f02daa'  # dev
     sw_interface_ip6nd_ra_config_reply: '0xe8d4e804'  # dev
     sw_interface_rx_placement_details: '0xf6d7d024'  # perf
     sw_interface_ip6nd_ra_config: '0xc3f02daa'  # dev
     sw_interface_ip6nd_ra_config_reply: '0xe8d4e804'  # dev
     sw_interface_rx_placement_details: '0xf6d7d024'  # perf
     sw_interface_set_l2_xconnect_reply: '0xe8d4e804'  # dev
     sw_interface_set_rx_placement: '0xdb65f3c9'  # perf
     sw_interface_set_rx_placement_reply: '0xe8d4e804'  # perf
     sw_interface_set_l2_xconnect_reply: '0xe8d4e804'  # dev
     sw_interface_set_rx_placement: '0xdb65f3c9'  # perf
     sw_interface_set_rx_placement_reply: '0xe8d4e804'  # perf
-    # ^^ tc01-64B-1c-eth-l2xcbase-eth-2memif-1dcr-mrr
-    # ^ l2xcfwdANDbaseANDlxcANDmemif
+    # ^^ see tc01-64B-1c-dot1q-l2bdbasemaclrn-eth-2memif-1dcr-mrr above
     sw_interface_set_table: '0xdf42a577'  # dev
     sw_interface_set_table_reply: '0xe8d4e804'  # dev
     sw_interface_set_unnumbered: '0x938ef33b'  # perf
     sw_interface_set_table: '0xdf42a577'  # dev
     sw_interface_set_table_reply: '0xe8d4e804'  # dev
     sw_interface_set_unnumbered: '0x938ef33b'  # perf
     # ^^ see ipsec_tunnel_if_add_del
     sw_interface_set_vxlan_bypass: '0xe74ca095'  # dev
     sw_interface_set_vxlan_bypass_reply: '0xe8d4e804'  # dev
     # ^^ see ipsec_tunnel_if_add_del
     sw_interface_set_vxlan_bypass: '0xe74ca095'  # dev
     sw_interface_set_vxlan_bypass_reply: '0xe8d4e804'  # dev
-    sw_interface_slave_details: '0xeef053c2'  # perf
-    sw_interface_slave_dump: '0x529cb13f'  # perf
+    sw_interface_slave_details: '0x3c4a0e23'  # perf
+    sw_interface_slave_dump: '0xf9e6675e'  # perf
     # ^^ see bond_*
     # ^^ see bond_*
+    # sw_interface_span_dump / details # honeycomb
     sw_interface_tap_v2_dump: '0x51077d14'  # dev
     sw_interface_tap_v2_details: '0x5ee87a5f'  # dev
     sw_interface_vhost_user_details: '0x91ff3307'  # dev
     sw_interface_vhost_user_dump: '0x51077d14'  # dev
     tap_create_v2: '0x8fa99320'  # dev
     tap_create_v2_reply: '0xfda5941f'  # dev
     sw_interface_tap_v2_dump: '0x51077d14'  # dev
     sw_interface_tap_v2_details: '0x5ee87a5f'  # dev
     sw_interface_vhost_user_details: '0x91ff3307'  # dev
     sw_interface_vhost_user_dump: '0x51077d14'  # dev
     tap_create_v2: '0x8fa99320'  # dev
     tap_create_v2_reply: '0xfda5941f'  # dev
-    vxlan_add_del_tunnel: '0x00f4bdd0'  # virl
-    vxlan_add_del_tunnel_reply: '0xfda5941f'  # virl
-    vxlan_tunnel_details: '0xce38e127'  # virl
-    vxlan_tunnel_dump: '0x529cb13f'  # virl
+    vxlan_add_del_tunnel: '0x00f4bdd0'  # dev
+    vxlan_add_del_tunnel_reply: '0xfda5941f'  # dev
+    # vxlan_gpe_tunnel_dump / details # honeycomb
+    # vxlan_tunnel_dump / details # unused L2 keyword: Get VXLAN dump
 
 
 
 
+# Hint to see the currently used command messages:
+# $ egrep -r 'cmd[0-9]* = ["'"'"'][a-z0-9]*_[a-z0-9_]*["'"'"']' | cut -d '=' -f 2- | cut -c 3- | cut -d "'" -f 1 | cut -d '"' -f 1 | env LC_COLLATE=C sort -u | less
+# Independent hint:
+# $ fgrep -rn dump_and_log
+
 # TODO: Create an on-demand perf verify job with a tag expression
 # covering API calls used only in perf tests,
 # and maintain the expression as device test gets more coverage.
 # TODO: Create an on-demand perf verify job with a tag expression
 # covering API calls used only in perf tests,
 # and maintain the expression as device test gets more coverage.
-# Currently two triggers are needed (3n vs 2n):
-# csit-3n-skx-perftest
+# Currently 2 triggers are needed: 2n (with AVF) vs 3n (with hwcrypto):
+# csit-3n-hsw-perftest
 # mrrAND1cAND64bANDnic_intel-x710ANDip4fwdANDiaclANDacl10AND100_flows
 # mrrAND1cAND64bANDnic_intel-x710ANDip4fwdANDiaclANDacl10AND100_flows
-# mrrAND1cAND64bANDnic_intel-x710ANDl2bdmaclrnANDbaseANDdrv_avf
-# mrrAND1cAND64bANDnic_intel-x710ANDl2patchANDdrv_avf
 # mrrAND1cAND64bANDnic_intel-x710ANDnat44NOTscaleNOTsrc_user_1
 # mrrAND1cAND64bANDnic_intel-x710ANDdot1qANDl2bdmaclrnANDbaseANDmemif
 # mrrAND1cAND64bANDnic_intel-x710ANDnat44NOTscaleNOTsrc_user_1
 # mrrAND1cAND64bANDnic_intel-x710ANDdot1qANDl2bdmaclrnANDbaseANDmemif
-# mrrAND1cAND64bANDnic_intel-x710ANDl2xcfwdANDbaseANDlxcANDmemif
 # mrrAND1cAND64bANDnic_intel-x710ANDl2xcfwdANDlbond_1l
 # mrrAND1cAND64bANDnic_intel-x710ANDmacipANDacl1AND100_flows
 # mrrAND1cAND78bANDnic_intel-x710ANDsrv6_1sid
 # mrrAND1cAND64bANDnic_intel-x710ANDl2xcfwdANDlbond_1l
 # mrrAND1cAND64bANDnic_intel-x710ANDmacipANDacl1AND100_flows
 # mrrAND1cAND78bANDnic_intel-x710ANDsrv6_1sid
+# mrrAND1cAND64bANDnic_intel-xl710ANDipsechwANDtnl_1ANDaes_256_gcm
+# mrrAND1cAND64bANDnic_intel-xl710ANDipsecANDlispgpe
 # csit-2n-skx-perftest
 # mrrAND1cAND64bANDnic_intel-x710ANDdot1qANDbaseANDdrv_avfANDgbp
 # csit-2n-skx-perftest
 # mrrAND1cAND64bANDnic_intel-x710ANDdot1qANDbaseANDdrv_avfANDgbp
+# mrrAND1cAND64bANDnic_intel-x710ANDl2patchANDdrv_avf
 
 # TODO: Once device job has complete API coverage,
 # add a check to make sure each message was encountered;
 
 # TODO: Once device job has complete API coverage,
 # add a check to make sure each message was encountered;
index f60972f..24fda52 100644 (file)
@@ -42,8 +42,9 @@ class Memif(object):
         :returns: List of memif interfaces extracted from Papi response.
         :rtype: list
         """
         :returns: List of memif interfaces extracted from Papi response.
         :rtype: list
         """
+        cmd = "memif_dump"
         with PapiSocketExecutor(node) as papi_exec:
         with PapiSocketExecutor(node) as papi_exec:
-            details = papi_exec.add("memif_dump").get_details()
+            details = papi_exec.add(cmd).get_details()
 
         for memif in details:
             memif["hw_addr"] = str(memif["hw_addr"])
 
         for memif in details:
             memif["hw_addr"] = str(memif["hw_addr"])
index aec43b6..d651e78 100644 (file)
@@ -19,6 +19,7 @@ import copy
 import glob
 import json
 import shutil
 import glob
 import json
 import shutil
+import struct  # vpp-papi can raise struct.error
 import subprocess
 import sys
 import tempfile
 import subprocess
 import sys
 import tempfile
@@ -95,8 +96,9 @@ class PapiSocketExecutor(object):
 
     Note: Use only with "with" statement, e.g.:
 
 
     Note: Use only with "with" statement, e.g.:
 
+        cmd = 'show_version'
         with PapiSocketExecutor(node) as papi_exec:
         with PapiSocketExecutor(node) as papi_exec:
-            reply = papi_exec.add('show_version').get_reply(err_msg)
+            reply = papi_exec.add(cmd).get_reply(err_msg)
 
     This class processes two classes of VPP PAPI methods:
     1. Simple request / reply: method='request'.
 
     This class processes two classes of VPP PAPI methods:
     1. Simple request / reply: method='request'.
@@ -110,8 +112,9 @@ class PapiSocketExecutor(object):
 
     a. One request with no arguments:
 
 
     a. One request with no arguments:
 
+        cmd = 'show_version'
         with PapiSocketExecutor(node) as papi_exec:
         with PapiSocketExecutor(node) as papi_exec:
-            reply = papi_exec.add('show_version').get_reply(err_msg)
+            reply = papi_exec.add(cmd).get_reply(err_msg)
 
     b. Three requests with arguments, the second and the third ones are the same
        but with different arguments.
 
     b. Three requests with arguments, the second and the third ones are the same
        but with different arguments.
@@ -293,7 +296,7 @@ class PapiSocketExecutor(object):
         for _ in xrange(2):
             try:
                 vpp_instance.connect_sync("csit_socket")
         for _ in xrange(2):
             try:
                 vpp_instance.connect_sync("csit_socket")
-            except IOError as err:
+            except (IOError, struct.error) as err:
                 logger.warn("Got initial connect error {err!r}".format(err=err))
                 vpp_instance.disconnect()
             else:
                 logger.warn("Got initial connect error {err!r}".format(err=err))
                 vpp_instance.disconnect()
             else:
@@ -419,29 +422,29 @@ class PapiSocketExecutor(object):
         return self._execute(err_msg)
 
     @staticmethod
         return self._execute(err_msg)
 
     @staticmethod
-    def run_cli_cmd(node, cmd, log=True,
+    def run_cli_cmd(node, cli_cmd, log=True,
                     remote_vpp_socket=Constants.SOCKSVR_PATH):
         """Run a CLI command as cli_inband, return the "reply" field of reply.
 
         Optionally, log the field value.
 
         :param node: Node to run command on.
                     remote_vpp_socket=Constants.SOCKSVR_PATH):
         """Run a CLI command as cli_inband, return the "reply" field of reply.
 
         Optionally, log the field value.
 
         :param node: Node to run command on.
-        :param cmd: The CLI command to be run on the node.
+        :param cli_cmd: The CLI command to be run on the node.
         :param remote_vpp_socket: Path to remote socket to tunnel to.
         :param log: If True, the response is logged.
         :type node: dict
         :type remote_vpp_socket: str
         :param remote_vpp_socket: Path to remote socket to tunnel to.
         :param log: If True, the response is logged.
         :type node: dict
         :type remote_vpp_socket: str
-        :type cmd: str
+        :type cli_cmd: str
         :type log: bool
         :returns: CLI output.
         :rtype: str
         """
         :type log: bool
         :returns: CLI output.
         :rtype: str
         """
-        cli = 'cli_inband'
-        args = dict(cmd=cmd)
+        cmd = 'cli_inband'
+        args = dict(cmd=cli_cmd)
         err_msg = "Failed to run 'cli_inband {cmd}' PAPI command on host " \
                   "{host}".format(host=node['host'], cmd=cmd)
         with PapiSocketExecutor(node, remote_vpp_socket) as papi_exec:
         err_msg = "Failed to run 'cli_inband {cmd}' PAPI command on host " \
                   "{host}".format(host=node['host'], cmd=cmd)
         with PapiSocketExecutor(node, remote_vpp_socket) as papi_exec:
-            reply = papi_exec.add(cli, **args).get_reply(err_msg)["reply"]
+            reply = papi_exec.add(cmd, **args).get_reply(err_msg)["reply"]
         if log:
             logger.info(
                 "{cmd} ({host} - {remote_vpp_socket}):\n{reply}".
         if log:
             logger.info(
                 "{cmd} ({host} - {remote_vpp_socket}):\n{reply}".
@@ -450,21 +453,21 @@ class PapiSocketExecutor(object):
         return reply
 
     @staticmethod
         return reply
 
     @staticmethod
-    def run_cli_cmd_on_all_sockets(node, cmd, log=True):
+    def run_cli_cmd_on_all_sockets(node, cli_cmd, log=True):
         """Run a CLI command as cli_inband, on all sockets in topology file.
 
         :param node: Node to run command on.
         """Run a CLI command as cli_inband, on all sockets in topology file.
 
         :param node: Node to run command on.
-        :param cmd: The CLI command to be run on the node.
+        :param cli_cmd: The CLI command to be run on the node.
         :param log: If True, the response is logged.
         :type node: dict
         :param log: If True, the response is logged.
         :type node: dict
-        :type cmd: str
+        :type cli_cmd: str
         :type log: bool
         """
         sockets = Topology.get_node_sockets(node, socket_type=SocketType.PAPI)
         if sockets:
             for socket in sockets.values():
                 PapiSocketExecutor.run_cli_cmd(
         :type log: bool
         """
         sockets = Topology.get_node_sockets(node, socket_type=SocketType.PAPI)
         if sockets:
             for socket in sockets.values():
                 PapiSocketExecutor.run_cli_cmd(
-                    node, cmd, log=log, remote_vpp_socket=socket)
+                    node, cli_cmd, log=log, remote_vpp_socket=socket)
 
     @staticmethod
     def dump_and_log(node, cmds):
 
     @staticmethod
     def dump_and_log(node, cmds):
@@ -511,7 +514,7 @@ class PapiSocketExecutor(object):
             try:
                 try:
                     reply = papi_fn(**command["api_args"])
             try:
                 try:
                     reply = papi_fn(**command["api_args"])
-                except IOError as err:
+                except (IOError, struct.error) as err:
                     # Occasionally an error happens, try reconnect.
                     logger.warn("Reconnect after error: {err!r}".format(
                         err=err))
                     # Occasionally an error happens, try reconnect.
                     logger.warn("Reconnect after error: {err!r}".format(
                         err=err))
@@ -521,7 +524,7 @@ class PapiSocketExecutor(object):
                     self.vpp_instance.connect_sync("csit_socket")
                     logger.trace("Reconnected.")
                     reply = papi_fn(**command["api_args"])
                     self.vpp_instance.connect_sync("csit_socket")
                     logger.trace("Reconnected.")
                     reply = papi_fn(**command["api_args"])
-            except (AttributeError, IOError) as err:
+            except (AttributeError, IOError, struct.error) as err:
                 raise_from(AssertionError(err_msg), err, level="INFO")
             # *_dump commands return list of objects, convert, ordinary reply.
             if not isinstance(reply, list):
                 raise_from(AssertionError(err_msg), err, level="INFO")
             # *_dump commands return list of objects, convert, ordinary reply.
             if not isinstance(reply, list):
index 5f885d6..27cc304 100644 (file)
@@ -35,7 +35,7 @@ class Trace(object):
         for node in nodes.values():
             if node['type'] == NodeType.DUT:
                 PapiSocketExecutor.run_cli_cmd(
         for node in nodes.values():
             if node['type'] == NodeType.DUT:
                 PapiSocketExecutor.run_cli_cmd(
-                    node, cmd="show trace {max}".format(max=maximum))
+                    node, "show trace {max}".format(max=maximum))
 
     @staticmethod
     def clear_packet_trace_on_all_duts(nodes):
 
     @staticmethod
     def clear_packet_trace_on_all_duts(nodes):
@@ -46,4 +46,4 @@ class Trace(object):
         """
         for node in nodes.values():
             if node['type'] == NodeType.DUT:
         """
         for node in nodes.values():
             if node['type'] == NodeType.DUT:
-                PapiSocketExecutor.run_cli_cmd(node, cmd="clear trace")
+                PapiSocketExecutor.run_cli_cmd(node, "clear trace")
index 4e2b237..6e3ff70 100644 (file)
@@ -160,8 +160,9 @@ class VPPUtil(object):
         :returns: VPP version.
         :rtype: str
         """
         :returns: VPP version.
         :rtype: str
         """
+        cmd = 'show_version'
         with PapiSocketExecutor(node) as papi_exec:
         with PapiSocketExecutor(node) as papi_exec:
-            reply = papi_exec.add('show_version').get_reply()
+            reply = papi_exec.add(cmd).get_reply()
         return_version = reply['version'].rstrip('\0x00')
         version = 'VPP version:      {ver}\n'.format(ver=return_version)
         if verbose:
         return_version = reply['version'].rstrip('\0x00')
         version = 'VPP version:      {ver}\n'.format(ver=return_version)
         if verbose:
@@ -313,8 +314,9 @@ class VPPUtil(object):
         :returns: VPP thread data.
         :rtype: list
         """
         :returns: VPP thread data.
         :rtype: list
         """
+        cmd = 'show_threads'
         with PapiSocketExecutor(node) as papi_exec:
         with PapiSocketExecutor(node) as papi_exec:
-            reply = papi_exec.add('show_threads').get_reply()
+            reply = papi_exec.add(cmd).get_reply()
 
         threads_data = list()
         for thread in reply["thread_data"]:
 
         threads_data = list()
         for thread in reply["thread_data"]:
index 000bf1c..d4a7675 100644 (file)
 | ${dut2_spi}= | ${1000}
 | ${dut1_spi}= | ${1001}
 | ${ESP_PROTO}= | ${50}
 | ${dut2_spi}= | ${1000}
 | ${dut1_spi}= | ${1001}
 | ${ESP_PROTO}= | ${50}
+| ${tg_if_ip4}= | 192.168.100.2
+| ${dut_if_ip4}= | 192.168.100.3
+| ${tg_lo_ip4}= | 192.168.3.3
+| ${dut_lo_ip4}= | 192.168.4.4
+| ${ip4_plen}= | ${24}
 # Traffic profile:
 | ${traffic_profile}= | trex-sl-3n-ethip4-ip4src253
 
 # Traffic profile:
 | ${traffic_profile}= | trex-sl-3n-ethip4-ip4src253
 
@@ -93,6 +98,7 @@
 | | And Add PCI devices to all DUTs
 | | And Set Max Rate And Jumbo And Handle Multi Seg
 | | And Add cryptodev to all DUTs | ${phy_cores}
 | | And Add PCI devices to all DUTs
 | | And Set Max Rate And Jumbo And Handle Multi Seg
 | | And Add cryptodev to all DUTs | ${phy_cores}
+| | And Configure topology for IPv4 IPsec testing
 | | And Apply startup configuration on all VPP DUTs
 | | When Generate keys for IPSec | ${encr_alg} | ${auth_alg}
 | | When Initialize LISP GPE IPv4 over IPsec in 3-node circular topology
 | | And Apply startup configuration on all VPP DUTs
 | | When Generate keys for IPSec | ${encr_alg} | ${auth_alg}
 | | When Initialize LISP GPE IPv4 over IPsec in 3-node circular topology