1 # Copyright (c) 2017 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
6 # http://www.apache.org/licenses/LICENSE-2.0
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
14 """Implementation of keywords for Honeycomb setup."""
16 from json import loads
17 from time import time, sleep
19 from ipaddress import IPv6Address, AddressValueError
21 from robot.api import logger
23 from resources.libraries.python.HTTPRequest import HTTPRequest, HTTPCodes, \
25 from resources.libraries.python.constants import Constants as Const
26 from resources.libraries.python.honeycomb.HoneycombUtil import HoneycombError
27 from resources.libraries.python.honeycomb.HoneycombUtil \
28 import HoneycombUtil as HcUtil
29 from resources.libraries.python.ssh import SSH
30 from resources.libraries.python.topology import NodeType
class HoneycombSetup(object):
    """Implements keywords for Honeycomb setup.

    The keywords implemented in this class make possible to:
    - check the Honeycomb start-up state,
    - check the Honeycomb shutdown state,
    - add VPP to the topology.
    """

    def start_honeycomb_on_duts(*nodes):
        """Start Honeycomb on specified DUT nodes.

        This keyword starts the Honeycomb service on specified DUTs.
        The keyword just starts the Honeycomb and does not check its startup
        state. Use the keyword "Check Honeycomb Startup State" to check if the
        Honeycomb is up and running.
        Honeycomb must be installed in "/opt" directory, otherwise the start
        will fail.

        :param nodes: List of nodes to start Honeycomb on.
        :type nodes: list
        :raises HoneycombError: If Honeycomb fails to start.
        """
        # Dump environment details (Java, Honeycomb files) to the log first,
        # to aid debugging if the start fails.
        HoneycombSetup.print_environment(nodes)

        cmd = "sudo service honeycomb start"

        # NOTE(review): the enclosing "for node in nodes:" loop and the SSH
        # connection setup are missing from this copy of the file.
        if node['type'] == NodeType.DUT:
            # NOTE(review): truncated logger call — format string only.
            "\n(re)Starting Honeycomb service on node {0}".format(
            (ret_code, _, _) = ssh.exec_command_sudo(cmd)
            if int(ret_code) != 0:
                # NOTE(review): the ".format(node['host'])" continuation of
                # this raise is missing from this copy.
                raise HoneycombError('Node {0} failed to start Honeycomb.'.
            logger.info("Starting the Honeycomb service on node {0} is "
                        "in progress ...".format(node['host']))

    def stop_honeycomb_on_duts(*nodes):
        """Stop the Honeycomb service on specified DUT nodes.

        This keyword stops the Honeycomb service on specified nodes. It just
        stops the Honeycomb and does not check its shutdown state. Use the
        keyword "Check Honeycomb Shutdown State" to check if Honeycomb has
        stopped.

        :param nodes: List of nodes to stop Honeycomb on.
        :type nodes: list
        :raises HoneycombError: If Honeycomb failed to stop.
        """
        cmd = "sudo service honeycomb stop"

        # NOTE(review): the "errors = []" initialisation, the node loop and
        # the SSH setup are missing from this copy of the file.
        if node['type'] == NodeType.DUT:
            # NOTE(review): truncated logger call — format string only.
            "\nShutting down Honeycomb service on node {0}".format(
            (ret_code, _, _) = ssh.exec_command_sudo(cmd)
            if int(ret_code) != 0:
                # Collect failing hosts so all failures are reported at once.
                errors.append(node['host'])
            logger.info("Stopping the Honeycomb service on node {0} is "
                        "in progress ...".format(node['host']))
        # NOTE(review): the "if errors:" guard and the ".format(errors)"
        # continuation are missing from this copy.
        raise HoneycombError('Node(s) {0} failed to stop Honeycomb.'.

    def restart_honeycomb_on_dut(node):
        """Restart Honeycomb on specified DUT nodes.

        This keyword restarts the Honeycomb service on specified DUTs. Use the
        keyword "Check Honeycomb Startup State" to check if the Honeycomb is up
        and running.

        :param node: Node to restart Honeycomb on.
        :type node: dict
        :raises HoneycombError: If Honeycomb fails to start.
        """
        # NOTE(review): truncated logger call — the stray closing parenthesis
        # belongs to the elided call wrapper.
        "\n(re)Starting Honeycomb service on node {0}".format(node["host"]))

        cmd = "sudo service honeycomb restart"

        (ret_code, _, _) = ssh.exec_command_sudo(cmd)
        if int(ret_code) != 0:
            raise HoneycombError('Node {0} failed to restart Honeycomb.'.
                                 format(node['host']))

        # NOTE(review): truncated logger call — format string only.
        "Honeycomb service restart is in progress on node {0}".format(

    def check_honeycomb_startup_state(node, timeout=360, retries=20,
        """Repeatedly check the status of Honeycomb startup until it is fully
        started or until timeout or max retries is reached.

        :param node: Honeycomb node.
        :param timeout: Timeout value in seconds.
        :param retries: Max number of retries.
        :param interval: Interval between checks, in seconds.
        :type node: dict
        :type timeout: int
        :type retries: int
        :type interval: int
        :raises HoneycombError: If the Honeycomb process IP cannot be found,
        or if timeout or number of retries is exceeded."""
        # NOTE(review): the "interval=..." parameter, the "start = time()" /
        # "count = 0" initialisation, the "try:" opener and the sleep/retry
        # bookkeeping are missing from this copy of the file.
        while time() - start < timeout and count < retries:
            # Poll three Restconf endpoints; all must answer 200 OK for
            # Honeycomb to be considered fully started.
            status_code_version, _ = HcUtil.get_honeycomb_data(
                node, "oper_vpp_version")
            status_code_if_cfg, _ = HcUtil.get_honeycomb_data(
                node, "config_vpp_interfaces")
            status_code_if_oper, _ = HcUtil.get_honeycomb_data(
                node, "oper_vpp_interfaces")
            except HTTPRequestError:
            # NOTE(review): the first operand below re-tests
            # status_code_if_cfg; it almost certainly should test
            # status_code_version — confirm against upstream and fix.
            if status_code_if_cfg == HTTPCodes.OK\
                    and status_code_if_cfg == HTTPCodes.OK\
                    and status_code_if_oper == HTTPCodes.OK:
                logger.info("Check successful, Honeycomb is up and running.")
                # NOTE(review): truncated logger call. Also "${count}" looks
                # like a typo for "{count}": .format() never substitutes the
                # "$"-style placeholder, so the attempt number is not logged.
                "Attempt ${count} failed on Restconf check. Status codes:\n"
                "Version: {version}\n"
                "Interface config: {if_cfg}\n"
                "Interface operational: {if_oper}".format(
                    version=status_code_version,
                    if_cfg=status_code_if_cfg,
                    if_oper=status_code_if_oper))
        # Retries exhausted: include VPP service status in the error to help
        # distinguish a VPP failure from a Honeycomb failure.
        _, vpp_status, _ = ssh.exec_command("sudo service vpp status")
        raise HoneycombError(
            "Timeout or max retries exceeded. Status of VPP:\n"
            "{vpp_status}".format(vpp_status=vpp_status))

    def check_honeycomb_shutdown_state(node):
        """Check state of Honeycomb service during shutdown on specified nodes.

        Honeycomb nodes reply with connection refused or the following status
        codes depending on shutdown progress: codes 200, 404.

        :param node: List of DUT nodes stopping Honeycomb.
        :type node: dict
        :returns: True if all GETs fail to connect.
        :rtype: bool
        """
        cmd = "pgrep honeycomb"

        (ret_code, _, _) = ssh.exec_command_sudo(cmd)
        # NOTE(review): the "if ret_code == 0:" guard is missing from this
        # copy — a successful pgrep means the process still exists.
        raise HoneycombError('Honeycomb on node {0} is still '
                             'running.'.format(node['host']),
                             enable_logging=False)
        logger.info("Honeycomb on node {0} has stopped".
                    format(node['host']))

    def configure_restconf_binding_address(node):
        """Configure Honeycomb to accept restconf requests from all IP
        addresses. IP version is determined by node data.

        :param node: Information about a DUT node.
        :type node: dict
        :raises HoneycombError: If the configuration could not be changed.
        """
        find = "restconf-binding-address"
        # NOTE(review): the "try:" opening this block is missing from this
        # copy. Python 2 only — uses the unicode() builtin.
        IPv6Address(unicode(node["host"]))
        # if management IP of the node is in IPv6 format
        replace = '\\"restconf-binding-address\\": \\"0::0\\",'
        except (AttributeError, AddressValueError):
        replace = '\\"restconf-binding-address\\": \\"0.0.0.0\\",'

        # Replace the whole matching line in restconf.json using sed's
        # change (c\) command, editing the file in place.
        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/restconf.json".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        # NOTE(review): the "if ret_code != 0:" guard is missing from this
        # copy.
        raise HoneycombError("Failed to modify configuration on "
                             "node {0}, {1}".format(node, stderr))

    def configure_jvpp_timeout(node, timeout=10):
        """Configure timeout value for Java API commands Honeycomb sends to VPP.

        :param node: Information about a DUT node.
        :param timeout: Timeout value in seconds.
        :type node: dict
        :type timeout: int
        :raises HoneycombError: If the configuration could not be changed.
        """
        find = "jvpp-request-timeout"
        replace = '\\"jvpp-request-timeout\\": {0}'.format(timeout)

        # In-place line replacement in jvpp.json via sed's change command.
        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/jvpp.json".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        # NOTE(review): the "if ret_code != 0:" guard is missing from this
        # copy.
        raise HoneycombError("Failed to modify configuration on "
                             "node {0}, {1}".format(node, stderr))

    def print_environment(nodes):
        """Print information about the nodes to log. The information is defined
        by commands in cmds tuple at the beginning of this method.

        :param nodes: List of DUT nodes to get information about.
        :type nodes: list
        """
        # TODO: When everything is set and running in VIRL env, transform this
        # method to a keyword checking the environment.
        # NOTE(review): the "cmds = (" opener and several tuple entries are
        # missing from this copy of the file.
        "dpkg --list | grep openjdk",
        "ls -la /opt/honeycomb",
        "cat /opt/honeycomb/modules/*module-config")

        if node['type'] == NodeType.DUT:
            logger.info("Checking node {} ...".format(node['host']))
            logger.info("Command: {}".format(cmd))
            ssh.exec_command_sudo(cmd)

    def print_ports(node):
        """Uses "sudo netstat -anp | grep java" to print port where a java
        application listens.

        :param node: Honeycomb node where we want to print the ports.
        :type node: dict
        """
        cmds = ("netstat -anp | grep java",
                "ps -ef | grep [h]oneycomb")

        # NOTE(review): the per-command loop and SSH setup are missing from
        # this copy of the file.
        logger.info("Checking node {} ...".format(node['host']))
        logger.info("Command: {}".format(cmd))
        ssh.exec_command_sudo(cmd)

    def configure_log_level(node, level):
        """Set Honeycomb logging to the specified level.

        :param node: Honeycomb node.
        :param level: Log level (INFO, DEBUG, TRACE).
        :type node: dict
        :type level: str
        """
        find = 'logger name=\\"io.fd\\"'
        replace = '<logger name=\\"io.fd\\" level=\\"{0}\\"/>'.format(level)

        # Replace the io.fd logger element in logback.xml in place.
        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/logback.xml".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        # NOTE(review): the "if ret_code != 0:" guard is missing from this
        # copy.
        raise HoneycombError("Failed to modify configuration on "
                             "node {0}, {1}".format(node, stderr))

    def manage_honeycomb_features(node, feature, disable=False):
        """Configure Honeycomb to use features that are disabled by default, or
        disable previously enabled features.

        ..Note:: If the module is not enabled in VPP, Honeycomb will
        be unable to establish VPP connection.

        :param node: Honeycomb node.
        :param feature: Feature to enable.
        :param disable: Disable the specified feature instead of enabling it.
        :type node: dict
        :type feature: string
        :type disable: bool
        :raises HoneycombError: If the configuration could not be changed.
        """
        # Map of feature name -> list of Honeycomb Guice modules that must be
        # (un)commented in the module-config files.
        disabled_features = {
            "NSH": ["io.fd.hc2vpp.vppnsh.impl.VppNshModule"],
            "BGP": ["io.fd.hc2vpp.bgp.inet.BgpInetModule",
                    "io.fd.honeycomb.infra.bgp.BgpModule",
                    "io.fd.honeycomb.infra.bgp.BgpReadersModule",
                    "io.fd.honeycomb.infra.bgp.BgpWritersModule",
                    "io.fd.honeycomb.northbound.bgp.extension.InetModule",
                    "io.fd.honeycomb.northbound.bgp.extension.EvpnModule",
                    "io.fd.honeycomb.northbound.bgp.extension.L3VpnV4Module",
                    "io.fd.honeycomb.northbound.bgp.extension.L3VpnV6Module",
                    "io.fd.honeycomb.northbound.bgp.extension."
                    "LabeledUnicastModule",
                    "io.fd.honeycomb.northbound.bgp.extension.LinkstateModule"]
        # NOTE(review): the closing "}" of the dict and the SSH setup are
        # missing from this copy of the file.
        if feature in disabled_features.keys():
            # for every module, uncomment by replacing the entire line
            for item in disabled_features[feature]:
                find = replace = "{0}".format(item)
                # NOTE(review): the "if disable:" guard is missing from this
                # copy — "//" prefixing the module name comments it back out.
                replace = "// {0}".format(find)

                argument = '"/{0}/c\\ {1}"'.format(find, replace)
                path = "{0}/modules/*module-config"\
                    .format(Const.REMOTE_HC_DIR)
                command = "sed -i {0} {1}".format(argument, path)

                (ret_code, _, stderr) = ssh.exec_command_sudo(command)
                # NOTE(review): the "if ret_code != 0:" guard is missing from
                # this copy.
                raise HoneycombError("Failed to modify configuration on "
                                     "node {0}, {1}".format(node, stderr))
        # NOTE(review): the "else:" introducing this raise is missing from
        # this copy.
        raise HoneycombError(
            "Unrecognized feature {0}.".format(feature))

    def copy_java_libraries(node):
        """Copy Java libraries installed by vpp-api-java package to honeycomb
        lib folder.

        This is a (temporary?) workaround for jvpp version mismatches.

        :param node: Honeycomb node
        :type node: dict
        """
        (_, stdout, _) = ssh.exec_command_sudo(
            "ls /usr/share/java | grep ^jvpp-*")

        # Drop the trailing empty string produced by the final newline.
        files = stdout.split("\n")[:-1]
        # Example filenames handled below:
        # jvpp-registry-17.04.jar
        # jvpp-core-17.04.jar
        # NOTE(review): the "for item in files:" loop header is missing from
        # this copy of the file.
        parts = item.split("-")
        # Derive Maven-style coordinates from the filename, e.g.
        # jvpp-registry-17.04.jar -> artifact "jvpp-registry",
        # version "17.04-SNAPSHOT" (first five chars of the version field).
        version = "{0}-SNAPSHOT".format(parts[2][:5])
        artifact_id = "{0}-{1}".format(parts[0], parts[1])

        directory = "{0}/lib/io/fd/vpp/{1}/{2}".format(
            Const.REMOTE_HC_DIR, artifact_id, version)
        cmd = "sudo mkdir -p {0}; " \
              "sudo cp /usr/share/java/{1} {0}/{2}-{3}.jar".format(
                  directory, item, artifact_id, version)

        (ret_code, _, stderr) = ssh.exec_command(cmd)
        # NOTE(review): the "if ret_code != 0:" guard is missing from this
        # copy.
        raise HoneycombError("Failed to copy JVPP libraries on "
                             "node {0}, {1}".format(node, stderr))

    def copy_odl_client(node, odl_name, src_path, dst_path):
        """Copy ODL Client from source path to destination path.

        :param node: Honeycomb node.
        :param odl_name: Name of ODL client version to use.
        :param src_path: Source Path where to find ODl client.
        :param dst_path: Destination path.
        :type node: dict
        :type odl_name: str
        :type src_path: str
        :type dst_path: str
        :raises HoneycombError: If the operation fails.
        """
        # Remove any previously copied client first, then copy a fresh one.
        cmd = "sudo rm -rf {dst}/*karaf_{odl_name} && " \
              "cp -r {src}/*karaf_{odl_name}* {dst}".format(
                  src=src_path, odl_name=odl_name, dst=dst_path)

        ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=180)
        if int(ret_code) != 0:
            raise HoneycombError(
                "Failed to copy ODL client on node {0}".format(node["host"]))

    def setup_odl_client(node, path):
        """Start ODL client on the specified node.

        Karaf should be located in the provided path, and VPP and Honeycomb
        should already be running, otherwise the start will fail.

        :param node: Node to start ODL client on.
        :param path: Path to ODL client on node.
        :type node: dict
        :type path: str
        :raises HoneycombError: If Honeycomb fails to start.
        """
        logger.console("\nStarting ODL client ...")

        cmd = "{path}/*karaf*/bin/start clean".format(path=path)
        ret_code, _, _ = ssh.exec_command_sudo(cmd)

        if int(ret_code) != 0:
            raise HoneycombError('Node {0} failed to start ODL.'.
                                 format(node['host']))
        logger.info("Starting the ODL client on node {0} is "
                    "in progress ...".format(node['host']))

    def install_odl_features(node, path, *features):
        """Install required features on a running ODL client.

        :param node: Honeycomb node.
        :param path: Path to ODL client on node.
        :param features: Optional, list of additional features to install.
        :type node: dict
        :type path: str
        :type features: list
        """
        # Base feature set required for restconf/netconf mediation.
        cmd = "{path}/*karaf*/bin/client -u karaf feature:install " \
              "odl-restconf-all " \
              "odl-netconf-connector-all " \
              "odl-netconf-topology".format(path=path)
        for feature in features:
            cmd += " {0}".format(feature)

        ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=250)

        if int(ret_code) != 0:
            raise HoneycombError("Feature install did not succeed.")

    def check_odl_startup_state(node):
        """Check the status of ODL client startup.

        :param node: Honeycomb node.
        :type node: dict
        :returns: True when ODL is started.
        :rtype: bool
        :raises HoneycombError: When the response is not code 200: OK.
        """
        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")
        # Codes meaning "running but not ready yet" — raise a retriable
        # error for these instead of succeeding.
        expected_status_codes = (HTTPCodes.UNAUTHORIZED,
                                 HTTPCodes.SERVICE_UNAVAILABLE,
                                 HTTPCodes.INTERNAL_SERVER_ERROR)

        status_code, _ = HTTPRequest.get(node, path, timeout=10,
                                         enable_logging=False)
        if status_code == HTTPCodes.OK:
            logger.info("ODL client on node {0} is up and running".
                        format(node['host']))
        elif status_code in expected_status_codes:
            if status_code == HTTPCodes.UNAUTHORIZED:
                logger.info('Unauthorized. If this triggers keyword '
                            'timeout, verify username and password.')
            raise HoneycombError('ODL client on node {0} running but '
                                 'not yet ready.'.format(node['host']),
                                 enable_logging=False)
        # NOTE(review): the "else:" introducing this raise and the
        # ".format(status_code)" continuation are missing from this copy.
        raise HoneycombError('Unexpected return code: {0}.'.

    def check_odl_shutdown_state(node):
        """Check the status of ODL client shutdown.

        :param node: Honeycomb node.
        :type node: dict
        :returns: True when ODL is stopped.
        :rtype: bool
        :raises HoneycombError: When the response is not code 200: OK.
        """
        cmd = "pgrep -f karaf"
        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")

        # NOTE(review): the "try:" opener is missing from this copy. A
        # successful GET means the client still answers, i.e. not stopped.
        HTTPRequest.get(node, path, timeout=10, enable_logging=False)
        raise HoneycombError("ODL client is still running.")
        except HTTPRequestError:
        logger.debug("Connection refused, checking process state....")
        ret_code, _, _ = ssh.exec_command(cmd)
        # NOTE(review): the "if ret_code == 0:" guard is missing from this
        # copy — pgrep succeeding means a karaf process still exists.
        raise HoneycombError("ODL client is still running.")

    def mount_honeycomb_on_odl(node):
        """Tell ODL client to mount Honeycomb instance over netconf.

        :param node: Honeycomb node.
        :type node: dict
        :raises HoneycombError: When the response is not code 200: OK.
        """
        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")

        url_file = "{0}/{1}".format(Const.RESOURCES_TPL_HC,
                                    "odl_client/mount_honeycomb.json")

        with open(url_file) as template:
            data = template.read()

        # NOTE(review): the positional arguments of this POST (node, path,
        # payload/data, timeout) are partly missing from this copy.
        status_code, _ = HTTPRequest.post(
            headers={"Content-Type": "application/json",
                     "Accept": "text/plain"},
            enable_logging=False)

        if status_code == HTTPCodes.OK:
            logger.info("ODL mount point configured successfully.")
        elif status_code == HTTPCodes.CONFLICT:
            # 409 Conflict: the mount point already exists — treated as
            # success.
            logger.info("ODL mount point was already configured.")
        # NOTE(review): the "else:" introducing this raise is missing from
        # this copy.
        raise HoneycombError('Mount point configuration not successful')

    def stop_odl_client(node, path):
        """Stop ODL client service on the specified node.

        :param node: Node to start ODL client on.
        :param path: Path to ODL client.
        :type node: dict
        :type path: str
        :raises HoneycombError: If ODL client fails to stop.
        """
        cmd = "{0}/*karaf*/bin/stop".format(path)

        ret_code, _, _ = ssh.exec_command_sudo(cmd)
        if int(ret_code) != 0:
            # Graceful stop failed; fall back to killing the process.
            logger.debug("ODL Client refused to shut down.")
            cmd = "pkill -f 'karaf'"
            (ret_code, _, _) = ssh.exec_command_sudo(cmd)
            if int(ret_code) != 0:
                raise HoneycombError('Node {0} failed to stop ODL.'.
                                     format(node['host']))

        logger.info("ODL client service stopped.")

    def set_static_arp(node, ip_address, mac_address):
        """Configure a static ARP entry using arp.

        :param node: Node in topology.
        :param ip_address: IP address for the entry.
        :param mac_address: MAC address for the entry.
        :type node: dict
        :type ip_address: str
        :type mac_address: str
        :raises RuntimeError: If the operation fails.
        """
        ret_code, _, _ = ssh.exec_command_sudo("arp -s {0} {1}".format(
            ip_address, mac_address))

        # NOTE(review): the "if ret_code != 0:" guard is missing from this
        # copy. The message typo "adddress" is a runtime string and is left
        # unchanged here; fix it upstream.
        raise RuntimeError("Failed to configure static ARP adddress.")
class HoneycombStartupConfig(object):
    """Generator for Honeycomb startup configuration.
    """

    # NOTE(review): the "def __init__(self):" header, the closing quotes of
    # the shell template below, and several attribute defaults (scheduler,
    # jit_mode, params, ssh) are missing from this copy of the file. The
    # assignments initialise instance attributes; the template is the shell
    # wrapper script that restarts Honeycomb while it exits with status 100.
        self.template = """#!/bin/sh -
while [ $STATUS -eq 100 ]
{java_call} -jar $(dirname $0)/{jar_filename}
echo "Honeycomb exited with status: $STATUS"
if [ $STATUS -eq 100 ]
        self.java_call = "{scheduler} {affinity} java{jit_mode}{params}"
        self.core_affinity = ""

    def apply_config(self, node):
        """Generate configuration file /opt/honeycomb/honeycomb on the
        specified node.

        :param node: Honeycomb node.
        :type node: dict
        """
        self.ssh.connect(node)
        # Find the Honeycomb jar to launch; its name carries the version.
        _, filename, _ = self.ssh.exec_command("ls /opt/honeycomb | grep .jar")

        # NOTE(review): the "params=self.params)" continuation of this call
        # is missing from this copy.
        java_call = self.java_call.format(scheduler=self.scheduler,
                                          affinity=self.core_affinity,
                                          jit_mode=self.jit_mode,
        self.config = self.template.format(java_call=java_call,
                                           jar_filename=filename)

        self.ssh.connect(node)
        # Write via /tmp first, then sudo-move into place, since the target
        # directory is not writable by the plain user.
        cmd = "echo '{config}' > /tmp/honeycomb " \
              "&& chmod +x /tmp/honeycomb " \
              "&& sudo mv -f /tmp/honeycomb /opt/honeycomb".\
            format(config=self.config)
        self.ssh.exec_command(cmd)

    def set_cpu_scheduler(self, scheduler="FIFO"):
        """Use alternate CPU scheduler.

        Note: OTHER scheduler doesn't load-balance over isolcpus.

        :param scheduler: CPU scheduler to use.
        :type scheduler: str
        """
        # NOTE(review): the closing "}" of this dict is missing from this
        # copy of the file.
        schedulers = {"FIFO": "-f 99",  # First In, First Out
                      "RR": "-r 99",  # Round Robin
                      "OTHER": "-o",  # Ubuntu default
        self.scheduler = "chrt {0}".format(schedulers[scheduler])

    def set_cpu_core_affinity(self, low, high=None):
        """Set core affinity for the honeycomb process and subprocesses.

        :param low: Lowest core ID number.
        :param high: Highest core ID number. Leave empty to use a single core.
        :type low: int
        :type high: int
        """
        # With no high bound, pin to the single core "low-low".
        self.core_affinity = "taskset -c {low}-{high}".format(
            low=low, high=high if high else low)

    def set_jit_compiler_mode(self, jit_mode):
        """Set running mode for Java's JIT compiler.

        :param jit_mode: Desired JIT mode.
        :type jit_mode: str
        """
        # NOTE(review): the closing "}" of this dict is missing from this
        # copy of the file.
        modes = {"client": " -client",  # Default
                 "server": " -server",  # Higher performance but longer warmup
                 "classic": " -classic"  # Disables JIT compiler
        self.jit_mode = modes[jit_mode]

    def set_memory_size(self, mem_min, mem_max=None):
        """Set minimum and maximum memory use for the JVM.

        :param mem_min: Minimum amount of memory (MB).
        :param mem_max: Maximum amount of memory (MB). Default is 4 times
            the minimum value.
        :type mem_min: int
        :type mem_max: int
        """
        self.params += " -Xms{min}m -Xmx{max}m".format(
            min=mem_min, max=mem_max if mem_max else mem_min*4)

    def set_metaspace_size(self, mem_min, mem_max=None):
        """Set minimum and maximum memory used for class metadata in the JVM.

        :param mem_min: Minimum metaspace size (MB).
        :param mem_max: Maximum metaspace size (MB). Default is 4 times
            the minimum value.
        :type mem_min: int
        :type mem_max: int
        """
        self.params += " -XX:MetaspaceSize={min}m " \
                       "-XX:MaxMetaspaceSize={max}m".format(
                           min=mem_min, max=mem_max if mem_max else mem_min*4)

    def set_numa_optimization(self):
        """Use optimization of memory use and garbage collection for NUMA
        architectures.
        """
        self.params += " -XX:+UseNUMA -XX:+UseParallelGC"

    def set_ssh_security_provider(self):
        """Disables BouncyCastle for SSHD."""
        # Workaround for issue described in:
        # https://wiki.fd.io/view/Honeycomb/Releases/1609/Honeycomb_and_ODL
        self.params += " -Dorg.apache.sshd.registerBouncyCastle=false"