1 # Copyright (c) 2017 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
6 # http://www.apache.org/licenses/LICENSE-2.0
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
14 """Implementation of keywords for Honeycomb setup."""
from json import loads
from time import time, sleep

from ipaddress import IPv6Address, AddressValueError

from robot.api import logger

from resources.libraries.python.constants import Constants as Const
from resources.libraries.python.HTTPRequest import HTTPRequest, HTTPCodes, \
    HTTPRequestError
from resources.libraries.python.honeycomb.HoneycombUtil import HoneycombError
from resources.libraries.python.honeycomb.HoneycombUtil \
    import HoneycombUtil as HcUtil
from resources.libraries.python.ssh import SSH
from resources.libraries.python.topology import NodeType
class HoneycombSetup(object):
    """Implements keywords for Honeycomb setup.

    The keywords implemented in this class make possible to:
    - start Honeycomb,
    - stop Honeycomb,
    - check the Honeycomb start-up state,
    - check the Honeycomb shutdown state,
    - add VPP to the topology.
    """

    @staticmethod
    def start_honeycomb_on_duts(*nodes):
        """Start Honeycomb on specified DUT nodes.

        This keyword starts the Honeycomb service on specified DUTs.
        The keyword just starts the Honeycomb and does not check its startup
        state. Use the keyword "Check Honeycomb Startup State" to check if the
        Honeycomb is up and running.
        Honeycomb must be installed in "/opt" directory, otherwise the start
        will fail.

        :param nodes: List of nodes to start Honeycomb on.
        :type nodes: list
        :raises HoneycombError: If Honeycomb fails to start.
        """
        HoneycombSetup.print_environment(nodes)

        logger.console("\n(re)Starting Honeycomb service ...")

        cmd = "sudo service honeycomb start"

        # NOTE(review): per-node SSH scaffolding reconstructed from a
        # line-numbered excerpt -- confirm against upstream history.
        for node in nodes:
            if node['type'] == NodeType.DUT:
                ssh = SSH()
                ssh.connect(node)
                (ret_code, _, _) = ssh.exec_command_sudo(cmd)
                if int(ret_code) != 0:
                    raise HoneycombError('Node {0} failed to start Honeycomb.'.
                                         format(node['host']))
                else:
                    logger.info("Starting the Honeycomb service on node {0} is "
                                "in progress ...".format(node['host']))
81 def stop_honeycomb_on_duts(*nodes):
82 """Stop the Honeycomb service on specified DUT nodes.
84 This keyword stops the Honeycomb service on specified nodes. It just
85 stops the Honeycomb and does not check its shutdown state. Use the
86 keyword "Check Honeycomb Shutdown State" to check if Honeycomb has
88 :param nodes: List of nodes to stop Honeycomb on.
90 :raises HoneycombError: If Honeycomb failed to stop.
92 logger.console("\nShutting down Honeycomb service ...")
94 cmd = "sudo service honeycomb stop"
98 if node['type'] == NodeType.DUT:
101 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
102 if int(ret_code) != 0:
103 errors.append(node['host'])
105 logger.info("Stopping the Honeycomb service on node {0} is "
106 "in progress ...".format(node['host']))
108 raise HoneycombError('Node(s) {0} failed to stop Honeycomb.'.
112 def restart_honeycomb_on_dut(node):
113 """Restart Honeycomb on specified DUT nodes.
115 This keyword restarts the Honeycomb service on specified DUTs. Use the
116 keyword "Check Honeycomb Startup State" to check if the Honeycomb is up
119 :param node: Node to restart Honeycomb on.
121 :raises HoneycombError: If Honeycomb fails to start.
124 logger.console("\n(re)Starting Honeycomb service ...")
126 cmd = "sudo service honeycomb restart"
130 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
131 if int(ret_code) != 0:
132 raise HoneycombError('Node {0} failed to restart Honeycomb.'.
133 format(node['host']))
136 "Honeycomb service restart is in progress on node {0}".format(
140 def check_honeycomb_startup_state(node, timeout=360, retries=20,
142 """Repeatedly check the status of Honeycomb startup until it is fully
143 started or until timeout or max retries is reached.
145 :param node: Honeycomb node.
146 :param timeout: Timeout value in seconds.
147 :param retries: Max number of retries.
148 :param interval: Interval between checks, in seconds.
153 :raises HoneycombError: If the Honeycomb process IP cannot be found,
154 or if timeout or number of retries is exceeded."""
161 while time() - start < timeout and count < retries:
165 status_code_version, _ = HcUtil.get_honeycomb_data(
166 node, "oper_vpp_version")
167 status_code_if_cfg, _ = HcUtil.get_honeycomb_data(
168 node, "config_vpp_interfaces")
169 status_code_if_oper, _ = HcUtil.get_honeycomb_data(
170 node, "oper_vpp_interfaces")
171 except HTTPRequestError:
174 if status_code_if_cfg == HTTPCodes.OK\
175 and status_code_if_cfg == HTTPCodes.OK\
176 and status_code_if_oper == HTTPCodes.OK:
177 logger.info("Check successful, Honeycomb is up and running.")
181 "Attempt ${count} failed on Restconf check. Status codes:\n"
182 "Version: {version}\n"
183 "Interface config: {if_cfg}\n"
184 "Interface operational: {if_oper}".format(
186 version=status_code_version,
187 if_cfg=status_code_if_cfg,
188 if_oper=status_code_if_oper))
192 _, vpp_status, _ = ssh.exec_command("sudo service vpp status")
193 raise HoneycombError(
194 "Timeout or max retries exceeded. Status of VPP:\n"
195 "{vpp_status}".format(vpp_status=vpp_status))
198 def check_honeycomb_shutdown_state(node):
199 """Check state of Honeycomb service during shutdown on specified nodes.
201 Honeycomb nodes reply with connection refused or the following status
202 codes depending on shutdown progress: codes 200, 404.
204 :param node: List of DUT nodes stopping Honeycomb.
206 :return: True if all GETs fail to connect.
209 cmd = "pgrep honeycomb"
213 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
215 raise HoneycombError('Honeycomb on node {0} is still '
216 'running.'.format(node['host']),
217 enable_logging=False)
219 logger.info("Honeycomb on node {0} has stopped".
220 format(node['host']))
224 def configure_restconf_binding_address(node):
225 """Configure Honeycomb to accept restconf requests from all IP
226 addresses. IP version is determined by node data.
228 :param node: Information about a DUT node.
230 :raises HoneycombError: If the configuration could not be changed.
233 find = "restconf-binding-address"
235 IPv6Address(unicode(node["host"]))
236 # if management IP of the node is in IPv6 format
237 replace = '\\"restconf-binding-address\\": \\"0::0\\",'
238 except (AttributeError, AddressValueError):
239 replace = '\\"restconf-binding-address\\": \\"0.0.0.0\\",'
241 argument = '"/{0}/c\\ {1}"'.format(find, replace)
242 path = "{0}/config/restconf.json".format(Const.REMOTE_HC_DIR)
243 command = "sed -i {0} {1}".format(argument, path)
247 (ret_code, _, stderr) = ssh.exec_command_sudo(command)
249 raise HoneycombError("Failed to modify configuration on "
250 "node {0}, {1}".format(node, stderr))
253 def configure_jvpp_timeout(node, timeout=10):
254 """Configure timeout value for Java API commands Honeycomb sends to VPP.
256 :param node: Information about a DUT node.
257 :param timeout: Timeout value in seconds.
260 :raises HoneycombError: If the configuration could not be changed.
263 find = "jvpp-request-timeout"
264 replace = '\\"jvpp-request-timeout\\": {0}'.format(timeout)
266 argument = '"/{0}/c\\ {1}"'.format(find, replace)
267 path = "{0}/config/jvpp.json".format(Const.REMOTE_HC_DIR)
268 command = "sed -i {0} {1}".format(argument, path)
272 (ret_code, _, stderr) = ssh.exec_command_sudo(command)
274 raise HoneycombError("Failed to modify configuration on "
275 "node {0}, {1}".format(node, stderr))
278 def print_environment(nodes):
279 """Print information about the nodes to log. The information is defined
280 by commands in cmds tuple at the beginning of this method.
282 :param nodes: List of DUT nodes to get information about.
286 # TODO: When everything is set and running in VIRL env, transform this
287 # method to a keyword checking the environment.
295 "dpkg --list | grep openjdk",
296 "ls -la /opt/honeycomb",
297 "cat /opt/honeycomb/modules/*module-config")
300 if node['type'] == NodeType.DUT:
301 logger.info("Checking node {} ...".format(node['host']))
303 logger.info("Command: {}".format(cmd))
306 ssh.exec_command_sudo(cmd)
309 def print_ports(node):
310 """Uses "sudo netstat -anp | grep java" to print port where a java
313 :param node: Honeycomb node where we want to print the ports.
317 cmds = ("netstat -anp | grep java",
318 "ps -ef | grep [h]oneycomb")
320 logger.info("Checking node {} ...".format(node['host']))
322 logger.info("Command: {}".format(cmd))
325 ssh.exec_command_sudo(cmd)
328 def configure_log_level(node, level):
329 """Set Honeycomb logging to the specified level.
331 :param node: Honeycomb node.
332 :param level: Log level (INFO, DEBUG, TRACE).
337 find = 'logger name=\\"io.fd\\"'
338 replace = '<logger name=\\"io.fd\\" level=\\"{0}\\"/>'.format(level)
340 argument = '"/{0}/c\\ {1}"'.format(find, replace)
341 path = "{0}/config/logback.xml".format(Const.REMOTE_HC_DIR)
342 command = "sed -i {0} {1}".format(argument, path)
346 (ret_code, _, stderr) = ssh.exec_command_sudo(command)
348 raise HoneycombError("Failed to modify configuration on "
349 "node {0}, {1}".format(node, stderr))
352 def manage_honeycomb_features(node, feature, disable=False):
353 """Configure Honeycomb to use features that are disabled by default, or
354 disable previously enabled features.
356 ..Note:: If the module is not enabled in VPP, Honeycomb will
357 be unable to establish VPP connection.
359 :param node: Honeycomb node.
360 :param feature: Feature to enable.
361 :param disable: Disable the specified feature instead of enabling it.
363 :type feature: string
365 :raises HoneycombError: If the configuration could not be changed.
368 disabled_features = {
369 "NSH": ["io.fd.hc2vpp.vppnsh.impl.VppNshModule"],
370 "BGP": ["io.fd.hc2vpp.bgp.inet.BgpInetModule",
371 "io.fd.honeycomb.infra.bgp.BgpModule",
372 "io.fd.honeycomb.infra.bgp.BgpReadersModule",
373 "io.fd.honeycomb.infra.bgp.BgpWritersModule"]
379 if feature in disabled_features.keys():
380 # for every module, uncomment by replacing the entire line
381 for item in disabled_features[feature]:
382 find = replace = "{0}".format(item)
384 replace = "// {0}".format(find)
386 argument = '"/{0}/c\\ {1}"'.format(find, replace)
387 path = "{0}/modules/*module-config"\
388 .format(Const.REMOTE_HC_DIR)
389 command = "sed -i {0} {1}".format(argument, path)
391 (ret_code, _, stderr) = ssh.exec_command_sudo(command)
393 raise HoneycombError("Failed to modify configuration on "
394 "node {0}, {1}".format(node, stderr))
396 raise HoneycombError(
397 "Unrecognized feature {0}.".format(feature))
400 def copy_java_libraries(node):
401 """Copy Java libraries installed by vpp-api-java package to honeycomb
404 This is a (temporary?) workaround for jvpp version mismatches.
406 :param node: Honeycomb node
412 (_, stdout, _) = ssh.exec_command_sudo(
413 "ls /usr/share/java | grep ^jvpp-*")
415 files = stdout.split("\n")[:-1]
418 # jvpp-registry-17.04.jar
419 # jvpp-core-17.04.jar
421 parts = item.split("-")
422 version = "{0}-SNAPSHOT".format(parts[2][:5])
423 artifact_id = "{0}-{1}".format(parts[0], parts[1])
425 directory = "{0}/lib/io/fd/vpp/{1}/{2}".format(
426 Const.REMOTE_HC_DIR, artifact_id, version)
427 cmd = "sudo mkdir -p {0}; " \
428 "sudo cp /usr/share/java/{1} {0}/{2}-{3}.jar".format(
429 directory, item, artifact_id, version)
431 (ret_code, _, stderr) = ssh.exec_command(cmd)
433 raise HoneycombError("Failed to copy JVPP libraries on "
434 "node {0}, {1}".format(node, stderr))
437 def copy_odl_client(node, odl_name, src_path, dst_path):
438 """Copy ODL Client from source path to destination path.
440 :param node: Honeycomb node.
441 :param odl_name: Name of ODL client version to use.
442 :param src_path: Source Path where to find ODl client.
443 :param dst_path: Destination path.
448 :raises HoneycombError: If the operation fails.
454 cmd = "sudo rm -rf {dst}/*karaf_{odl_name} && " \
455 "cp -r {src}/*karaf_{odl_name}* {dst}".format(
456 src=src_path, odl_name=odl_name, dst=dst_path)
458 ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=180)
459 if int(ret_code) != 0:
460 raise HoneycombError(
461 "Failed to copy ODL client on node {0}".format(node["host"]))
464 def setup_odl_client(node, path):
465 """Start ODL client on the specified node.
467 Karaf should be located in the provided path, and VPP and Honeycomb
468 should already be running, otherwise the start will fail.
469 :param node: Node to start ODL client on.
470 :param path: Path to ODL client on node.
473 :raises HoneycombError: If Honeycomb fails to start.
476 logger.console("\nStarting ODL client ...")
480 cmd = "{path}/*karaf*/bin/start clean".format(path=path)
481 ret_code, _, _ = ssh.exec_command_sudo(cmd)
483 if int(ret_code) != 0:
484 raise HoneycombError('Node {0} failed to start ODL.'.
485 format(node['host']))
487 logger.info("Starting the ODL client on node {0} is "
488 "in progress ...".format(node['host']))
491 def install_odl_features(node, path, *features):
492 """Install required features on a running ODL client.
494 :param node: Honeycomb node.
495 :param path: Path to ODL client on node.
496 :param features: Optional, list of additional features to install.
505 cmd = "{path}/*karaf*/bin/client -u karaf feature:install " \
506 "odl-restconf-all " \
507 "odl-netconf-connector-all " \
508 "odl-netconf-topology".format(path=path)
509 for feature in features:
510 cmd += " {0}".format(feature)
512 ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=250)
514 if int(ret_code) != 0:
515 raise HoneycombError("Feature install did not succeed.")
518 def check_odl_startup_state(node):
519 """Check the status of ODL client startup.
521 :param node: Honeycomb node.
523 :returns: True when ODL is started.
525 :raises HoneycombError: When the response is not code 200: OK.
528 path = HcUtil.read_path_from_url_file(
529 "odl_client/odl_netconf_connector")
530 expected_status_codes = (HTTPCodes.UNAUTHORIZED,
533 HTTPCodes.SERVICE_UNAVAILABLE,
534 HTTPCodes.INTERNAL_SERVER_ERROR)
536 status_code, _ = HTTPRequest.get(node, path, timeout=10,
537 enable_logging=False)
538 if status_code == HTTPCodes.OK:
539 logger.info("ODL client on node {0} is up and running".
540 format(node['host']))
541 elif status_code in expected_status_codes:
542 if status_code == HTTPCodes.UNAUTHORIZED:
543 logger.info('Unauthorized. If this triggers keyword '
544 'timeout, verify username and password.')
545 raise HoneycombError('ODL client on node {0} running but '
546 'not yet ready.'.format(node['host']),
547 enable_logging=False)
549 raise HoneycombError('Unexpected return code: {0}.'.
554 def check_odl_shutdown_state(node):
555 """Check the status of ODL client shutdown.
557 :param node: Honeycomb node.
559 :returns: True when ODL is stopped.
561 :raises HoneycombError: When the response is not code 200: OK.
564 cmd = "pgrep -f karaf"
565 path = HcUtil.read_path_from_url_file(
566 "odl_client/odl_netconf_connector")
569 HTTPRequest.get(node, path, timeout=10, enable_logging=False)
570 raise HoneycombError("ODL client is still running.")
571 except HTTPRequestError:
572 logger.debug("Connection refused, checking process state....")
575 ret_code, _, _ = ssh.exec_command(cmd)
577 raise HoneycombError("ODL client is still running.")
582 def mount_honeycomb_on_odl(node):
583 """Tell ODL client to mount Honeycomb instance over netconf.
585 :param node: Honeycomb node.
587 :raises HoneycombError: When the response is not code 200: OK.
590 path = HcUtil.read_path_from_url_file(
591 "odl_client/odl_netconf_connector")
593 url_file = "{0}/{1}".format(Const.RESOURCES_TPL_HC,
594 "odl_client/mount_honeycomb.json")
596 with open(url_file) as template:
597 data = template.read()
601 status_code, _ = HTTPRequest.post(
604 headers={"Content-Type": "application/json",
605 "Accept": "text/plain"},
608 enable_logging=False)
610 if status_code == HTTPCodes.OK:
611 logger.info("ODL mount point configured successfully.")
612 elif status_code == HTTPCodes.CONFLICT:
613 logger.info("ODL mount point was already configured.")
615 raise HoneycombError('Mount point configuration not successful')
618 def stop_odl_client(node, path):
619 """Stop ODL client service on the specified node.
621 :param node: Node to start ODL client on.
622 :param path: Path to ODL client.
625 :raises HoneycombError: If ODL client fails to stop.
631 cmd = "{0}/*karaf*/bin/stop".format(path)
635 ret_code, _, _ = ssh.exec_command_sudo(cmd)
636 if int(ret_code) != 0:
637 logger.debug("ODL Client refused to shut down.")
638 cmd = "pkill -f 'karaf'"
639 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
640 if int(ret_code) != 0:
641 raise HoneycombError('Node {0} failed to stop ODL.'.
642 format(node['host']))
644 logger.info("ODL client service stopped.")
class HoneycombStartupConfig(object):
    """Generator for Honeycomb startup configuration."""

    def __init__(self):
        """Initialize Honeycomb startup config."""
        # NOTE(review): the shell template body is reconstructed from a
        # gapped excerpt -- confirm the exact script text upstream.
        # The wrapper restarts Honeycomb whenever it exits with status 100.
        self.template = """#!/bin/sh -
        STATUS=100

        while [ $STATUS -eq 100 ]
        do
          {java_call} -jar $(dirname $0)/{jar_filename}
          STATUS=$?
          echo "Honeycomb exited with status: $STATUS"
          if [ $STATUS -eq 100 ]
          then
            echo "Restarting..."
          fi
        done
        """
        self.java_call = "{scheduler} {affinity} java{jit_mode}{params}"

        self.scheduler = ""
        self.core_affinity = ""
        self.jit_mode = ""
        self.params = ""
        self.numa = ""

        self.config = ""
        self.ssh = SSH()
680 def apply_config(self, node):
681 """Generate configuration file /opt/honeycomb/honeycomb on the specified
684 :param node: Honeycomb node.
688 self.ssh.connect(node)
689 _, filename, _ = self.ssh.exec_command("ls /opt/honeycomb | grep .jar")
691 java_call = self.java_call.format(scheduler=self.scheduler,
692 affinity=self.core_affinity,
693 jit_mode=self.jit_mode,
695 self.config = self.template.format(java_call=java_call,
696 jar_filename=filename)
698 self.ssh.connect(node)
699 cmd = "echo '{config}' > /tmp/honeycomb " \
700 "&& chmod +x /tmp/honeycomb " \
701 "&& sudo mv -f /tmp/honeycomb /opt/honeycomb".\
702 format(config=self.config)
703 self.ssh.exec_command(cmd)
705 def set_cpu_scheduler(self, scheduler="FIFO"):
706 """Use alternate CPU scheduler.
708 Note: OTHER scheduler doesn't load-balance over isolcpus.
710 :param scheduler: CPU scheduler to use.
714 schedulers = {"FIFO": "-f 99", # First In, First Out
715 "RR": "-r 99", # Round Robin
716 "OTHER": "-o", # Ubuntu default
718 self.scheduler = "chrt {0}".format(schedulers[scheduler])
720 def set_cpu_core_affinity(self, low, high=None):
721 """Set core affinity for the honeycomb process and subprocesses.
723 :param low: Lowest core ID number.
724 :param high: Highest core ID number. Leave empty to use a single core.
729 self.core_affinity = "taskset -c {low}-{high}".format(
730 low=low, high=high if high else low)
732 def set_jit_compiler_mode(self, jit_mode):
733 """Set running mode for Java's JIT compiler.
735 :param jit_mode: Desiret JIT mode.
739 modes = {"client": " -client", # Default
740 "server": " -server", # Higher performance but longer warmup
741 "classic": " -classic" # Disables JIT compiler
744 self.jit_mode = modes[jit_mode]
746 def set_memory_size(self, mem_min, mem_max=None):
747 """Set minimum and maximum memory use for the JVM.
749 :param mem_min: Minimum amount of memory (MB).
750 :param mem_max: Maximum amount of memory (MB). Default is 4 times
756 self.params += " -Xms{min}m -Xmx{max}m".format(
757 min=mem_min, max=mem_max if mem_max else mem_min*4)
759 def set_metaspace_size(self, mem_min, mem_max=None):
760 """Set minimum and maximum memory used for class metadata in the JVM.
762 :param mem_min: Minimum metaspace size (MB).
763 :param mem_max: Maximum metaspace size (MB). Defailt is 4 times
769 self.params += " -XX:MetaspaceSize={min}m " \
770 "-XX:MaxMetaspaceSize={max}m".format(
771 min=mem_min, max=mem_max if mem_max else mem_min*4)
773 def set_numa_optimization(self):
774 """Use optimization of memory use and garbage collection for NUMA
777 self.params += " -XX:+UseNUMA -XX:+UseParallelGC"
779 def set_ssh_security_provider(self):
780 """Disables BouncyCastle for SSHD."""
781 # Workaround for issue described in:
782 # https://wiki.fd.io/view/Honeycomb/Releases/1609/Honeycomb_and_ODL
784 self.params += " -Dorg.apache.sshd.registerBouncyCastle=false"