1 # Copyright (c) 2018 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
6 # http://www.apache.org/licenses/LICENSE-2.0
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
14 """Implementation of keywords for Honeycomb setup."""
from json import loads
from time import time, sleep

from ipaddress import IPv6Address, AddressValueError

from robot.api import logger

from resources.libraries.python.HTTPRequest import HTTPRequest, HTTPCodes, \
    HTTPRequestError
from resources.libraries.python.constants import Constants as Const
from resources.libraries.python.honeycomb.HoneycombUtil import HoneycombError
from resources.libraries.python.honeycomb.HoneycombUtil \
    import HoneycombUtil as HcUtil
from resources.libraries.python.ssh import SSH
from resources.libraries.python.topology import NodeType
class HoneycombSetup(object):
    """Implements keywords for Honeycomb setup.

    The keywords implemented in this class make possible to:
    - start Honeycomb,
    - stop Honeycomb,
    - check the Honeycomb start-up state,
    - check the Honeycomb shutdown state,
    - add VPP to the topology.
    """

    @staticmethod
    def start_honeycomb_on_duts(*nodes):
        """Start Honeycomb on specified DUT nodes.

        This keyword starts the Honeycomb service on specified DUTs.
        The keyword just starts the Honeycomb and does not check its startup
        state. Use the keyword "Check Honeycomb Startup State" to check if the
        Honeycomb is up and running.
        Honeycomb must be installed in "/opt" directory, otherwise the start
        will fail.

        :param nodes: List of nodes to start Honeycomb on.
        :type nodes: list
        :raises HoneycombError: If Honeycomb fails to start.
        """
        HoneycombSetup.print_environment(nodes)

        cmd = "sudo service honeycomb start"

        for node in nodes:
            if node['type'] == NodeType.DUT:
                logger.console(
                    "\n(re)Starting Honeycomb service on node {0}".format(
                        node['host']))
                ssh = SSH()
                ssh.connect(node)
                (ret_code, _, _) = ssh.exec_command_sudo(cmd)
                if int(ret_code) != 0:
                    raise HoneycombError('Node {0} failed to start Honeycomb.'.
                                         format(node['host']))
                else:
                    logger.info("Starting the Honeycomb service on node {0} is "
                                "in progress ...".format(node['host']))

    @staticmethod
    def stop_honeycomb_on_duts(*nodes):
        """Stop the Honeycomb service on specified DUT nodes.

        This keyword stops the Honeycomb service on specified nodes. It just
        stops the Honeycomb and does not check its shutdown state. Use the
        keyword "Check Honeycomb Shutdown State" to check if Honeycomb has
        stopped.

        :param nodes: List of nodes to stop Honeycomb on.
        :type nodes: list
        :raises HoneycombError: If Honeycomb failed to stop.
        """
        errors = []
        cmd = "sudo service honeycomb stop"

        for node in nodes:
            if node['type'] == NodeType.DUT:
                logger.console(
                    "\nShutting down Honeycomb service on node {0}".format(
                        node['host']))
                ssh = SSH()
                ssh.connect(node)
                (ret_code, _, _) = ssh.exec_command_sudo(cmd)
                if int(ret_code) != 0:
                    # Collect failures; raise once after trying every node.
                    errors.append(node['host'])
                else:
                    logger.info("Stopping the Honeycomb service on node {0} is "
                                "in progress ...".format(node['host']))
        if errors:
            raise HoneycombError('Node(s) {0} failed to stop Honeycomb.'.
                                 format(errors))

    @staticmethod
    def restart_honeycomb_on_dut(node):
        """Restart Honeycomb on specified DUT nodes.

        This keyword restarts the Honeycomb service on specified DUTs. Use the
        keyword "Check Honeycomb Startup State" to check if the Honeycomb is up
        and running.

        :param node: Node to restart Honeycomb on.
        :type node: dict
        :raises HoneycombError: If Honeycomb fails to start.
        """
        logger.console(
            "\n(re)Starting Honeycomb service on node {0}".format(node["host"]))

        cmd = "sudo service honeycomb restart"

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, _) = ssh.exec_command_sudo(cmd)
        if int(ret_code) != 0:
            raise HoneycombError('Node {0} failed to restart Honeycomb.'.
                                 format(node['host']))
        else:
            logger.info(
                "Honeycomb service restart is in progress on node {0}".format(
                    node['host']))

    @staticmethod
    def check_honeycomb_startup_state(node, timeout=360, retries=20,
                                      interval=15):
        """Repeatedly check the status of Honeycomb startup until it is fully
        started or until timeout or max retries is reached.

        :param node: Honeycomb node.
        :param timeout: Timeout value in seconds.
        :param retries: Max number of retries.
        :param interval: Interval between checks, in seconds.
        :type node: dict
        :type timeout: int
        :type retries: int
        :type interval: int
        :raises HoneycombError: If the Honeycomb process IP cannot be found,
            or if timeout or number of retries is exceeded.
        """
        ssh = SSH()
        ssh.connect(node)

        count = 0
        start = time()
        while time() - start < timeout and count < retries:
            count += 1

            try:
                status_code_version, _ = HcUtil.get_honeycomb_data(
                    node, "oper_vpp_version")
                status_code_if_cfg, _ = HcUtil.get_honeycomb_data(
                    node, "config_vpp_interfaces")
                status_code_if_oper, _ = HcUtil.get_honeycomb_data(
                    node, "oper_vpp_interfaces")
            except HTTPRequestError:
                # Restconf not reachable yet; wait and retry.
                sleep(interval)
                continue
            # BUGFIX: original tested status_code_if_cfg twice and never
            # checked the version query result.
            if status_code_version == HTTPCodes.OK\
                    and status_code_if_cfg == HTTPCodes.OK\
                    and status_code_if_oper == HTTPCodes.OK:
                logger.info("Check successful, Honeycomb is up and running.")
                break
            else:
                # BUGFIX: original used shell-style "${count}" which
                # str.format never substitutes.
                logger.debug(
                    "Attempt {count} failed on Restconf check. Status codes:\n"
                    "Version: {version}\n"
                    "Interface config: {if_cfg}\n"
                    "Interface operational: {if_oper}".format(
                        count=count,
                        version=status_code_version,
                        if_cfg=status_code_if_cfg,
                        if_oper=status_code_if_oper))
                sleep(interval)
                continue
        else:
            # Loop exhausted without break: include VPP status in the error
            # to aid debugging.
            _, vpp_status, _ = ssh.exec_command("sudo service vpp status")
            raise HoneycombError(
                "Timeout or max retries exceeded. Status of VPP:\n"
                "{vpp_status}".format(vpp_status=vpp_status))

    @staticmethod
    def check_honeycomb_shutdown_state(node):
        """Check state of Honeycomb service during shutdown on specified nodes.

        Honeycomb nodes reply with connection refused or the following status
        codes depending on shutdown progress: codes 200, 404.

        :param node: List of DUT nodes stopping Honeycomb.
        :type node: dict
        :returns: True if all GETs fail to connect.
        :rtype: bool
        :raises HoneycombError: If the Honeycomb process is still running.
        """
        cmd = "pgrep honeycomb"

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, _) = ssh.exec_command_sudo(cmd)
        # pgrep returns 0 when a matching process exists.
        if int(ret_code) == 0:
            raise HoneycombError('Honeycomb on node {0} is still '
                                 'running.'.format(node['host']),
                                 enable_logging=False)
        else:
            logger.info("Honeycomb on node {0} has stopped".
                        format(node['host']))
        return True

    @staticmethod
    def configure_restconf_binding_address(node):
        """Configure Honeycomb to accept restconf requests from all IP
        addresses. IP version is determined by node data.

        :param node: Information about a DUT node.
        :type node: dict
        :raises HoneycombError: If the configuration could not be changed.
        """
        find = "restconf-binding-address"
        try:
            IPv6Address(unicode(node["host"]))
            # if management IP of the node is in IPv6 format
            replace = '\\"restconf-binding-address\\": \\"0::0\\",'
        except (AttributeError, AddressValueError):
            replace = '\\"restconf-binding-address\\": \\"0.0.0.0\\",'

        # sed "c" command: replace the whole matching line.
        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/restconf.json".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        if int(ret_code) != 0:
            raise HoneycombError("Failed to modify configuration on "
                                 "node {0}, {1}".format(node, stderr))

    @staticmethod
    def configure_jvpp_timeout(node, timeout=10):
        """Configure timeout value for Java API commands Honeycomb sends to VPP.

        :param node: Information about a DUT node.
        :param timeout: Timeout value in seconds.
        :type node: dict
        :type timeout: int
        :raises HoneycombError: If the configuration could not be changed.
        """
        find = "jvpp-request-timeout"
        replace = '\\"jvpp-request-timeout\\": {0}'.format(timeout)

        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/jvpp.json".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        if int(ret_code) != 0:
            raise HoneycombError("Failed to modify configuration on "
                                 "node {0}, {1}".format(node, stderr))

    @staticmethod
    def print_environment(nodes):
        """Print information about the nodes to log. The information is defined
        by commands in cmds tuple at the beginning of this method.

        :param nodes: List of DUT nodes to get information about.
        :type nodes: list
        """
        # TODO: When everything is set and running in VIRL env, transform this
        # method to a keyword checking the environment.

        cmds = ("uname -a",
                "df -lh",
                "echo $JAVA_HOME",
                "echo $PATH",
                "which java",
                "java -version",
                "dpkg --list | grep openjdk",
                "ls -la /opt/honeycomb",
                "cat /opt/honeycomb/modules/*module-config")

        for node in nodes:
            if node['type'] == NodeType.DUT:
                logger.info("Checking node {} ...".format(node['host']))
                for cmd in cmds:
                    logger.info("Command: {}".format(cmd))
                    ssh = SSH()
                    ssh.connect(node)
                    ssh.exec_command_sudo(cmd)

    @staticmethod
    def print_ports(node):
        """Uses "sudo netstat -anp | grep java" to print port where a java
        application listens.

        :param node: Honeycomb node where we want to print the ports.
        :type node: dict
        """
        cmds = ("netstat -anp | grep java",
                "ps -ef | grep [h]oneycomb")

        logger.info("Checking node {} ...".format(node['host']))
        for cmd in cmds:
            logger.info("Command: {}".format(cmd))
            ssh = SSH()
            ssh.connect(node)
            ssh.exec_command_sudo(cmd)

    @staticmethod
    def configure_log_level(node, level):
        """Set Honeycomb logging to the specified level.

        :param node: Honeycomb node.
        :param level: Log level (INFO, DEBUG, TRACE).
        :type node: dict
        :type level: str
        :raises HoneycombError: If the configuration could not be changed.
        """
        find = 'logger name=\\"io.fd\\"'
        replace = '<logger name=\\"io.fd\\" level=\\"{0}\\"/>'.format(level)

        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/logback.xml".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        if int(ret_code) != 0:
            raise HoneycombError("Failed to modify configuration on "
                                 "node {0}, {1}".format(node, stderr))

    @staticmethod
    def manage_honeycomb_features(node, feature, disable=False):
        """Configure Honeycomb to use features that are disabled by default, or
        disable previously enabled features.

        ..Note:: If the module is not enabled in VPP, Honeycomb will
            be unable to establish VPP connection.

        :param node: Honeycomb node.
        :param feature: Feature to enable.
        :param disable: Disable the specified feature instead of enabling it.
        :type node: dict
        :type feature: string
        :type disable: bool
        :raises HoneycombError: If the configuration could not be changed.
        """
        disabled_features = {
            "NSH": ["io.fd.hc2vpp.vppnsh.impl.VppNshModule"],
            "BGP": ["io.fd.hc2vpp.bgp.inet.BgpInetModule",
                    "io.fd.honeycomb.infra.bgp.BgpModule",
                    "io.fd.honeycomb.infra.bgp.BgpReadersModule",
                    "io.fd.honeycomb.infra.bgp.BgpWritersModule",
                    "io.fd.honeycomb.northbound.bgp.extension.InetModule",
                    "io.fd.honeycomb.northbound.bgp.extension.EvpnModule",
                    "io.fd.honeycomb.northbound.bgp.extension.L3VpnV4Module",
                    "io.fd.honeycomb.northbound.bgp.extension.L3VpnV6Module",
                    "io.fd.honeycomb.northbound.bgp.extension."
                    "LabeledUnicastModule",
                    "io.fd.honeycomb.northbound.bgp.extension.LinkstateModule"]
        }

        ssh = SSH()
        ssh.connect(node)

        if feature in disabled_features.keys():
            # for every module, uncomment by replacing the entire line
            for item in disabled_features[feature]:
                find = replace = "{0}".format(item)
                if disable:
                    # Comment the module out instead.
                    replace = "// {0}".format(find)

                argument = '"/{0}/c\\ {1}"'.format(find, replace)
                path = "{0}/modules/*module-config"\
                    .format(Const.REMOTE_HC_DIR)
                command = "sed -i {0} {1}".format(argument, path)

                (ret_code, _, stderr) = ssh.exec_command_sudo(command)
                if int(ret_code) != 0:
                    raise HoneycombError("Failed to modify configuration on "
                                         "node {0}, {1}".format(node, stderr))
        else:
            raise HoneycombError(
                "Unrecognized feature {0}.".format(feature))

    @staticmethod
    def copy_java_libraries(node):
        """Copy Java libraries installed by vpp-api-java package to honeycomb
        lib folder.

        This is a (temporary?) workaround for jvpp version mismatches.

        :param node: Honeycomb node
        :type node: dict
        :raises HoneycombError: If the copy operation fails.
        """
        ssh = SSH()
        ssh.connect(node)
        (_, stdout, _) = ssh.exec_command_sudo(
            "ls /usr/share/java | grep ^jvpp-*")

        files = stdout.split("\n")[:-1]
        for item in files:
            # example filenames:
            # jvpp-registry-17.04.jar
            # jvpp-core-17.04.jar

            parts = item.split("-")
            version = "{0}-SNAPSHOT".format(parts[2][:5])
            artifact_id = "{0}-{1}".format(parts[0], parts[1])

            directory = "{0}/lib/io/fd/vpp/{1}/{2}".format(
                Const.REMOTE_HC_DIR, artifact_id, version)
            cmd = "sudo mkdir -p {0}; " \
                  "sudo cp /usr/share/java/{1} {0}/{2}-{3}.jar".format(
                      directory, item, artifact_id, version)

            (ret_code, _, stderr) = ssh.exec_command(cmd)
            if int(ret_code) != 0:
                raise HoneycombError("Failed to copy JVPP libraries on "
                                     "node {0}, {1}".format(node, stderr))

    @staticmethod
    def copy_odl_client(node, odl_name, src_path, dst_path):
        """Copy ODL Client from source path to destination path.

        :param node: Honeycomb node.
        :param odl_name: Name of ODL client version to use.
        :param src_path: Source Path where to find ODl client.
        :param dst_path: Destination path.
        :type node: dict
        :type odl_name: str
        :type src_path: str
        :type dst_path: str
        :raises HoneycombError: If the operation fails.
        """
        ssh = SSH()
        ssh.connect(node)

        cmd = "sudo rm -rf {dst}/*karaf_{odl_name} && " \
              "cp -r {src}/*karaf_{odl_name}* {dst}".format(
                  src=src_path, odl_name=odl_name, dst=dst_path)

        ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=180)
        if int(ret_code) != 0:
            raise HoneycombError(
                "Failed to copy ODL client on node {0}".format(node["host"]))

    @staticmethod
    def setup_odl_client(node, path):
        """Start ODL client on the specified node.

        Karaf should be located in the provided path, and VPP and Honeycomb
        should already be running, otherwise the start will fail.

        :param node: Node to start ODL client on.
        :param path: Path to ODL client on node.
        :type node: dict
        :type path: str
        :raises HoneycombError: If Honeycomb fails to start.
        """
        logger.console("\nStarting ODL client ...")
        ssh = SSH()
        ssh.connect(node)

        cmd = "{path}/*karaf*/bin/start clean".format(path=path)
        ret_code, _, _ = ssh.exec_command_sudo(cmd)

        if int(ret_code) != 0:
            raise HoneycombError('Node {0} failed to start ODL.'.
                                 format(node['host']))
        else:
            logger.info("Starting the ODL client on node {0} is "
                        "in progress ...".format(node['host']))

    @staticmethod
    def install_odl_features(node, odl_name, path, *features):
        """Install required features on a running ODL client.

        :param node: Honeycomb node.
        :param odl_name: Name of ODL client version to use.
        :param path: Path to ODL client on node.
        :param features: Optional, list of additional features to install.
        :type node: dict
        :type odl_name: str
        :type path: str
        :type features: list
        :raises HoneycombError: If the feature installation fails.
        """
        ssh = SSH()
        ssh.connect(node)

        auth = ""
        # Newer ODL releases require credentials for the karaf client.
        if odl_name.lower() == "oxygen" or odl_name.lower() == "fluorine":
            auth = "-u karaf -p karaf"

        cmd = "{path}/*karaf*/bin/client {auth} feature:install " \
              "odl-restconf-all " \
              "odl-netconf-connector-all " \
              "odl-netconf-topology".format(path=path, auth=auth)
        for feature in features:
            cmd += " {0}".format(feature)

        ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=250)

        if int(ret_code) != 0:
            raise HoneycombError("Feature install did not succeed.")

    @staticmethod
    def check_odl_startup_state(node):
        """Check the status of ODL client startup.

        :param node: Honeycomb node.
        :type node: dict
        :returns: True when ODL is started.
        :rtype: bool
        :raises HoneycombError: When the response is not code 200: OK.
        """
        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")
        # Codes that indicate ODL is starting but not fully ready yet.
        expected_status_codes = (HTTPCodes.UNAUTHORIZED,
                                 HTTPCodes.FORBIDDEN,
                                 HTTPCodes.NOT_FOUND,
                                 HTTPCodes.SERVICE_UNAVAILABLE,
                                 HTTPCodes.INTERNAL_SERVER_ERROR)

        status_code, _ = HTTPRequest.get(node, path, timeout=10,
                                         enable_logging=False)
        if status_code == HTTPCodes.OK:
            logger.info("ODL client on node {0} is up and running".
                        format(node['host']))
        elif status_code in expected_status_codes:
            if status_code == HTTPCodes.UNAUTHORIZED:
                logger.info('Unauthorized. If this triggers keyword '
                            'timeout, verify username and password.')
            raise HoneycombError('ODL client on node {0} running but '
                                 'not yet ready.'.format(node['host']),
                                 enable_logging=False)
        else:
            raise HoneycombError('Unexpected return code: {0}.'.
                                 format(status_code))
        return True

    @staticmethod
    def check_odl_shutdown_state(node):
        """Check the status of ODL client shutdown.

        :param node: Honeycomb node.
        :type node: dict
        :returns: True when ODL is stopped.
        :rtype: bool
        :raises HoneycombError: When the ODL client is still running.
        """
        cmd = "pgrep -f karaf"
        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")

        try:
            HTTPRequest.get(node, path, timeout=10, enable_logging=False)
            raise HoneycombError("ODL client is still running.")
        except HTTPRequestError:
            # Connection refused is the expected outcome when ODL is down;
            # double-check there is no leftover karaf process.
            logger.debug("Connection refused, checking process state....")
            ssh = SSH()
            ssh.connect(node)
            ret_code, _, _ = ssh.exec_command(cmd)
            if int(ret_code) == 0:
                raise HoneycombError("ODL client is still running.")

        return True

    @staticmethod
    def mount_honeycomb_on_odl(node):
        """Tell ODL client to mount Honeycomb instance over netconf.

        :param node: Honeycomb node.
        :type node: dict
        :raises HoneycombError: When the response is not code 200: OK.
        """
        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")

        url_file = "{0}/{1}".format(Const.RESOURCES_TPL_HC,
                                    "odl_client/mount_honeycomb.json")

        with open(url_file) as template:
            data = template.read()

        data = loads(data)

        status_code, _ = HTTPRequest.post(
            node,
            path,
            headers={"Content-Type": "application/json",
                     "Accept": "text/plain"},
            json=data,
            timeout=10,
            enable_logging=False)

        if status_code == HTTPCodes.OK:
            logger.info("ODL mount point configured successfully.")
        elif status_code == HTTPCodes.CONFLICT:
            logger.info("ODL mount point was already configured.")
        else:
            raise HoneycombError('Mount point configuration not successful')

    @staticmethod
    def stop_odl_client(node, path):
        """Stop ODL client service on the specified node.

        :param node: Node to start ODL client on.
        :param path: Path to ODL client.
        :type node: dict
        :type path: str
        :raises HoneycombError: If ODL client fails to stop.
        """
        ssh = SSH()
        ssh.connect(node)

        cmd = "{0}/*karaf*/bin/stop".format(path)

        ret_code, _, _ = ssh.exec_command_sudo(cmd)
        if int(ret_code) != 0:
            # Graceful stop failed; fall back to killing the process.
            logger.debug("ODL Client refused to shut down.")
            cmd = "pkill -f 'karaf'"
            (ret_code, _, _) = ssh.exec_command_sudo(cmd)
            if int(ret_code) != 0:
                raise HoneycombError('Node {0} failed to stop ODL.'.
                                     format(node['host']))

        logger.info("ODL client service stopped.")

    @staticmethod
    def set_static_arp(node, ip_address, mac_address):
        """Configure a static ARP entry using arp.

        :param node: Node in topology.
        :param ip_address: IP address for the entry.
        :param mac_address: MAC address for the entry.
        :type node: dict
        :type ip_address: str
        :type mac_address: str
        :raises RuntimeError: If the operation fails.
        """
        ssh = SSH()
        ssh.connect(node)
        ret_code, _, _ = ssh.exec_command_sudo("arp -s {0} {1}".format(
            ip_address, mac_address))

        if int(ret_code) != 0:
            raise RuntimeError("Failed to configure static ARP address.")
class HoneycombStartupConfig(object):
    """Generator for Honeycomb startup configuration.

    Builds the /opt/honeycomb/honeycomb shell wrapper from a template,
    collecting optional JVM/scheduler tweaks in instance attributes.
    """

    def __init__(self):
        """Initialize the startup script template and option holders."""
        # Shell wrapper: re-launch the jar as long as it exits with
        # status 100 (Honeycomb's "restart requested" exit code).
        self.template = """#!/bin/sh -
        STATUS=100

        while [ $STATUS -eq 100 ]
        do
          {java_call} -jar $(dirname $0)/{jar_filename}
          STATUS=$?
          echo "Honeycomb exited with status: $STATUS"
          if [ $STATUS -eq 100 ]
          then
            echo "Restarting..."
          fi
        done
        """
        self.java_call = "{scheduler} {affinity} java{jit_mode}{params}"

        # Optional fragments, filled in by the set_* methods below.
        self.scheduler = ""
        self.core_affinity = ""
        self.jit_mode = ""
        self.params = ""
        self.numa = ""

        self.config = ""
        self.ssh = SSH()

    def apply_config(self, node):
        """Generate configuration file /opt/honeycomb/honeycomb on the specified
        node.

        :param node: Honeycomb node.
        :type node: dict
        """
        self.ssh.connect(node)
        _, filename, _ = self.ssh.exec_command("ls /opt/honeycomb | grep .jar")

        java_call = self.java_call.format(scheduler=self.scheduler,
                                          affinity=self.core_affinity,
                                          jit_mode=self.jit_mode,
                                          params=self.params)
        self.config = self.template.format(java_call=java_call,
                                           jar_filename=filename)

        self.ssh.connect(node)
        cmd = "echo '{config}' > /tmp/honeycomb " \
              "&& chmod +x /tmp/honeycomb " \
              "&& sudo mv -f /tmp/honeycomb /opt/honeycomb".\
            format(config=self.config)
        self.ssh.exec_command(cmd)

    def set_cpu_scheduler(self, scheduler="FIFO"):
        """Use alternate CPU scheduler.

        Note: OTHER scheduler doesn't load-balance over isolcpus.

        :param scheduler: CPU scheduler to use.
        :type scheduler: str
        """
        schedulers = {"FIFO": "-f 99",  # First In, First Out
                      "RR": "-r 99",  # Round Robin
                      "OTHER": "-o",  # Ubuntu default
                     }
        self.scheduler = "chrt {0}".format(schedulers[scheduler])

    def set_cpu_core_affinity(self, low, high=None):
        """Set core affinity for the honeycomb process and subprocesses.

        :param low: Lowest core ID number.
        :param high: Highest core ID number. Leave empty to use a single core.
        :type low: int
        :type high: int
        """
        self.core_affinity = "taskset -c {low}-{high}".format(
            low=low, high=high if high else low)

    def set_jit_compiler_mode(self, jit_mode):
        """Set running mode for Java's JIT compiler.

        :param jit_mode: Desired JIT mode.
        :type jit_mode: str
        """
        modes = {"client": " -client",  # Default
                 "server": " -server",  # Higher performance but longer warmup
                 "classic": " -classic"  # Disables JIT compiler
                }

        self.jit_mode = modes[jit_mode]

    def set_memory_size(self, mem_min, mem_max=None):
        """Set minimum and maximum memory use for the JVM.

        :param mem_min: Minimum amount of memory (MB).
        :param mem_max: Maximum amount of memory (MB). Default is 4 times
            the minimum value.
        :type mem_min: int
        :type mem_max: int
        """
        self.params += " -Xms{min}m -Xmx{max}m".format(
            min=mem_min, max=mem_max if mem_max else mem_min*4)

    def set_metaspace_size(self, mem_min, mem_max=None):
        """Set minimum and maximum memory used for class metadata in the JVM.

        :param mem_min: Minimum metaspace size (MB).
        :param mem_max: Maximum metaspace size (MB). Default is 4 times
            the minimum value.
        :type mem_min: int
        :type mem_max: int
        """
        self.params += " -XX:MetaspaceSize={min}m " \
                       "-XX:MaxMetaspaceSize={max}m".format(
                           min=mem_min, max=mem_max if mem_max else mem_min*4)

    def set_numa_optimization(self):
        """Use optimization of memory use and garbage collection for NUMA
        architectures."""
        self.params += " -XX:+UseNUMA -XX:+UseParallelGC"

    def set_ssh_security_provider(self):
        """Disables BouncyCastle for SSHD."""
        # Workaround for issue described in:
        # https://wiki.fd.io/view/Honeycomb/Releases/1609/Honeycomb_and_ODL
        self.params += " -Dorg.apache.sshd.registerBouncyCastle=false"