1 # Copyright (c) 2017 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
6 # http://www.apache.org/licenses/LICENSE-2.0
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
14 """Implementation of keywords for Honeycomb setup."""
from json import loads

from ipaddress import IPv6Address, AddressValueError

from robot.api import logger

from resources.libraries.python.HTTPRequest import HTTPRequest, HTTPCodes, \
    HTTPRequestError
from resources.libraries.python.constants import Constants as Const
from resources.libraries.python.honeycomb.HoneycombUtil import HoneycombError
from resources.libraries.python.honeycomb.HoneycombUtil \
    import HoneycombUtil as HcUtil
from resources.libraries.python.ssh import SSH
from resources.libraries.python.topology import NodeType
class HoneycombSetup(object):
    """Implements keywords for Honeycomb setup.

    The keywords implemented in this class make possible to:
    - start Honeycomb,
    - stop Honeycomb,
    - check the Honeycomb start-up state,
    - check the Honeycomb shutdown state,
    - add VPP to the topology.
    """

    @staticmethod
    def start_honeycomb_on_duts(*nodes):
        """Start Honeycomb on specified DUT nodes.

        This keyword starts the Honeycomb service on specified DUTs.
        The keyword just starts the Honeycomb and does not check its startup
        state. Use the keyword "Check Honeycomb Startup State" to check if the
        Honeycomb is up and running.
        Honeycomb must be installed in "/opt" directory, otherwise the start
        will fail.

        :param nodes: List of nodes to start Honeycomb on.
        :type nodes: list
        :raises HoneycombError: If Honeycomb fails to start.
        """
        HoneycombSetup.print_environment(nodes)

        logger.console("\n(re)Starting Honeycomb service ...")

        cmd = "sudo service honeycomb start"

        for node in nodes:
            if node['type'] == NodeType.DUT:
                ssh = SSH()
                ssh.connect(node)
                (ret_code, _, _) = ssh.exec_command_sudo(cmd)
                if int(ret_code) != 0:
                    raise HoneycombError('Node {0} failed to start Honeycomb.'.
                                         format(node['host']))
                else:
                    logger.info("Starting the Honeycomb service on node {0} is "
                                "in progress ...".format(node['host']))

    @staticmethod
    def stop_honeycomb_on_duts(*nodes):
        """Stop the Honeycomb service on specified DUT nodes.

        This keyword stops the Honeycomb service on specified nodes. It just
        stops the Honeycomb and does not check its shutdown state. Use the
        keyword "Check Honeycomb Shutdown State" to check if Honeycomb has
        stopped.

        :param nodes: List of nodes to stop Honeycomb on.
        :type nodes: list
        :raises HoneycombError: If Honeycomb failed to stop.
        """
        logger.console("\nShutting down Honeycomb service ...")

        cmd = "sudo service honeycomb stop"
        # Collect failures and raise once, so every node is attempted.
        errors = []

        for node in nodes:
            if node['type'] == NodeType.DUT:
                ssh = SSH()
                ssh.connect(node)
                (ret_code, _, _) = ssh.exec_command_sudo(cmd)
                if int(ret_code) != 0:
                    errors.append(node['host'])
                else:
                    logger.info("Stopping the Honeycomb service on node {0} is "
                                "in progress ...".format(node['host']))
        if errors:
            raise HoneycombError('Node(s) {0} failed to stop Honeycomb.'.
                                 format(errors))

    @staticmethod
    def restart_honeycomb_on_dut(node):
        """Restart Honeycomb on specified DUT nodes.

        This keyword restarts the Honeycomb service on specified DUTs. Use the
        keyword "Check Honeycomb Startup State" to check if the Honeycomb is up
        and running.

        :param node: Node to restart Honeycomb on.
        :type node: dict
        :raises HoneycombError: If Honeycomb fails to start.
        """
        logger.console("\n(re)Starting Honeycomb service ...")

        cmd = "sudo service honeycomb restart"

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, _) = ssh.exec_command_sudo(cmd)
        if int(ret_code) != 0:
            raise HoneycombError('Node {0} failed to restart Honeycomb.'.
                                 format(node['host']))
        else:
            logger.info(
                "Honeycomb service restart is in progress on node {0}".format(
                    node['host']))

    @staticmethod
    def check_honeycomb_startup_state(*nodes):
        """Check state of Honeycomb service during startup on specified nodes.

        Reads html path from template file oper_vpp_version.url.

        Honeycomb nodes reply with connection refused or the following status
        codes depending on startup progress: codes 200, 401, 403, 404, 500, 503

        :param nodes: List of DUT nodes starting Honeycomb.
        :type nodes: list
        :return: True if all GETs returned code 200(OK).
        :rtype: bool
        :raises HoneycombError: If Honeycomb is not yet ready or returns an
            unexpected status code.
        """
        path = HcUtil.read_path_from_url_file("oper_vpp_version")
        expected_status_codes = (HTTPCodes.UNAUTHORIZED,
                                 HTTPCodes.FORBIDDEN,
                                 HTTPCodes.NOT_FOUND,
                                 HTTPCodes.SERVICE_UNAVAILABLE,
                                 HTTPCodes.INTERNAL_SERVER_ERROR)

        for node in nodes:
            if node['type'] == NodeType.DUT:
                HoneycombSetup.print_ports(node)
                try:
                    status_code, _ = HTTPRequest.get(node, path,
                                                     enable_logging=False)
                except HTTPRequestError:
                    # Dump the system log to aid debugging, then re-raise.
                    ssh = SSH()
                    ssh.connect(node)
                    ret_code, _, _ = ssh.exec_command_sudo(
                        "tail -n 100 /var/log/syslog")
                    if ret_code != 0:
                        # It's probably Centos
                        ssh.exec_command_sudo("tail -n 100 /var/log/messages")
                    raise

                if status_code == HTTPCodes.OK:
                    logger.info("Honeycomb on node {0} is up and running".
                                format(node['host']))
                elif status_code in expected_status_codes:
                    if status_code == HTTPCodes.UNAUTHORIZED:
                        logger.info('Unauthorized. If this triggers keyword '
                                    'timeout, verify Honeycomb username and '
                                    'password.')
                    raise HoneycombError('Honeycomb on node {0} running but '
                                         'not yet ready.'.format(node['host']),
                                         enable_logging=False)
                else:
                    raise HoneycombError('Unexpected return code: {0}.'.
                                         format(status_code))

                # Restconf reachable does not imply config data is ready yet.
                status_code, _ = HcUtil.get_honeycomb_data(
                    node, "config_vpp_interfaces")
                if status_code != HTTPCodes.OK:
                    raise HoneycombError('Honeycomb on node {0} running but '
                                         'not yet ready.'.format(node['host']),
                                         enable_logging=False)
        return True

    @staticmethod
    def check_honeycomb_shutdown_state(*nodes):
        """Check state of Honeycomb service during shutdown on specified nodes.

        Honeycomb nodes reply with connection refused or the following status
        codes depending on shutdown progress: codes 200, 404.

        :param nodes: List of DUT nodes stopping Honeycomb.
        :type nodes: list
        :return: True if all GETs fail to connect.
        :rtype: bool
        :raises HoneycombError: If the Honeycomb process is still running or
            still replying to HTTP requests.
        """
        cmd = "ps -ef | grep -v grep | grep honeycomb"
        for node in nodes:
            if node['type'] == NodeType.DUT:
                try:
                    status_code, _ = HTTPRequest.get(node, '/index.html',
                                                     enable_logging=False)
                    if status_code == HTTPCodes.OK:
                        raise HoneycombError('Honeycomb on node {0} is still '
                                             'running.'.format(node['host']),
                                             enable_logging=False)
                    elif status_code == HTTPCodes.NOT_FOUND:
                        raise HoneycombError('Honeycomb on node {0} is shutting'
                                             ' down.'.format(node['host']),
                                             enable_logging=False)
                    else:
                        raise HoneycombError('Unexpected return code: {0}.'.
                                             format(status_code))
                except HTTPRequestError:
                    # Connection refused is the expected "stopped" signal;
                    # confirm by checking the process table.
                    logger.debug('Connection refused, checking the process '
                                 'state ...')
                    ssh = SSH()
                    ssh.connect(node)
                    (ret_code, _, _) = ssh.exec_command_sudo(cmd)
                    if ret_code == 0:
                        raise HoneycombError('Honeycomb on node {0} is still '
                                             'running.'.format(node['host']),
                                             enable_logging=False)
                    else:
                        logger.info("Honeycomb on node {0} has stopped".
                                    format(node['host']))
        return True

    @staticmethod
    def configure_restconf_binding_address(node):
        """Configure Honeycomb to accept restconf requests from all IP
        addresses. IP version is determined by node data.

        :param node: Information about a DUT node.
        :type node: dict
        :raises HoneycombError: If the configuration could not be changed.
        """
        find = "restconf-binding-address"
        try:
            IPv6Address(unicode(node["host"]))
            # if management IP of the node is in IPv6 format
            replace = '\\"restconf-binding-address\\": \\"0::0\\",'
        except (AttributeError, AddressValueError):
            replace = '\\"restconf-binding-address\\": \\"0.0.0.0\\",'

        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/restconf.json".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        if ret_code != 0:
            raise HoneycombError("Failed to modify configuration on "
                                 "node {0}, {1}".format(node, stderr))

    @staticmethod
    def configure_jvpp_timeout(node, timeout=10):
        """Configure timeout value for Java API commands Honeycomb sends to VPP.

        :param node: Information about a DUT node.
        :param timeout: Timeout value in seconds.
        :type node: dict
        :type timeout: int
        :raises HoneycombError: If the configuration could not be changed.
        """
        find = "jvpp-request-timeout"
        replace = '\\"jvpp-request-timeout\\": {0}'.format(timeout)

        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/jvpp.json".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        if ret_code != 0:
            raise HoneycombError("Failed to modify configuration on "
                                 "node {0}, {1}".format(node, stderr))

    @staticmethod
    def print_environment(nodes):
        """Print information about the nodes to log. The information is defined
        by commands in cmds tuple at the beginning of this method.

        :param nodes: List of DUT nodes to get information about.
        :type nodes: list
        """
        # TODO: When everything is set and running in VIRL env, transform this
        # method to a keyword checking the environment.

        cmds = ("uname -a",
                "df -lh",
                "echo $JAVA_HOME",
                "echo $PATH",
                "which java",
                "java -version",
                "dpkg --list | grep openjdk",
                "ls -la /opt/honeycomb")

        for node in nodes:
            if node['type'] == NodeType.DUT:
                logger.info("Checking node {} ...".format(node['host']))
                for cmd in cmds:
                    logger.info("Command: {}".format(cmd))
                    ssh = SSH()
                    ssh.connect(node)
                    ssh.exec_command_sudo(cmd)

    @staticmethod
    def print_ports(node):
        """Uses "sudo netstat -anp | grep java" to print port where a java
        application listens.

        :param node: Honeycomb node where we want to print the ports.
        :type node: dict
        """
        cmds = ("netstat -anp | grep java",
                "ps -ef | grep [h]oneycomb")

        logger.info("Checking node {} ...".format(node['host']))
        for cmd in cmds:
            logger.info("Command: {}".format(cmd))
            ssh = SSH()
            ssh.connect(node)
            ssh.exec_command_sudo(cmd)

    @staticmethod
    def configure_log_level(node, level):
        """Set Honeycomb logging to the specified level.

        :param node: Honeycomb node.
        :param level: Log level (INFO, DEBUG, TRACE).
        :type node: dict
        :type level: str
        :raises HoneycombError: If the configuration could not be changed.
        """
        find = 'logger name=\\"io.fd\\"'
        replace = '<logger name=\\"io.fd\\" level=\\"{0}\\"/>'.format(level)

        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/logback.xml".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        if ret_code != 0:
            raise HoneycombError("Failed to modify configuration on "
                                 "node {0}, {1}".format(node, stderr))

    @staticmethod
    def manage_honeycomb_features(node, feature, disable=False):
        """Configure Honeycomb to use features that are disabled by default, or
        disable previously enabled features.

        ..Note:: If the module is not enabled in VPP, Honeycomb will
        be unable to establish VPP connection.

        :param node: Honeycomb node.
        :param feature: Feature to enable.
        :param disable: Disable the specified feature instead of enabling it.
        :type node: dict
        :type feature: string
        :type disable: bool
        :raises HoneycombError: If the configuration could not be changed.
        """
        # Map of supported features to the module class that implements them.
        disabled_features = {
            "NSH": "io.fd.hc2vpp.vppnsh.impl.VppNshModule"
        }

        ssh = SSH()
        ssh.connect(node)

        if feature in disabled_features.keys():
            # uncomment by replacing the entire line
            find = replace = "{0}".format(disabled_features[feature])
            if disable:
                replace = "// {0}".format(find)

            argument = '"/{0}/c\\ {1}"'.format(find, replace)
            path = "{0}/modules/*module-config"\
                .format(Const.REMOTE_HC_DIR)
            command = "sed -i {0} {1}".format(argument, path)

            (ret_code, _, stderr) = ssh.exec_command_sudo(command)
            if ret_code != 0:
                raise HoneycombError("Failed to modify configuration on "
                                     "node {0}, {1}".format(node, stderr))
        else:
            raise HoneycombError(
                "Unrecognized feature {0}.".format(feature))

    @staticmethod
    def copy_java_libraries(node):
        """Copy Java libraries installed by vpp-api-java package to honeycomb
        lib folder.

        This is a (temporary?) workaround for jvpp version mismatches.

        :param node: Honeycomb node
        :type node: dict
        :raises HoneycombError: If the copy operation fails.
        """
        ssh = SSH()
        ssh.connect(node)
        (_, stdout, _) = ssh.exec_command_sudo(
            "ls /usr/share/java | grep ^jvpp-*")

        files = stdout.split("\n")[:-1]
        for item in files:
            # example filenames:
            # jvpp-registry-17.04.jar
            # jvpp-core-17.04.jar
            parts = item.split("-")
            version = "{0}-SNAPSHOT".format(parts[2][:5])
            artifact_id = "{0}-{1}".format(parts[0], parts[1])

            directory = "{0}/lib/io/fd/vpp/{1}/{2}".format(
                Const.REMOTE_HC_DIR, artifact_id, version)
            cmd = "sudo mkdir -p {0}; " \
                  "sudo cp /usr/share/java/{1} {0}/{2}-{3}.jar".format(
                      directory, item, artifact_id, version)

            (ret_code, _, stderr) = ssh.exec_command(cmd)
            if ret_code != 0:
                raise HoneycombError("Failed to copy JVPP libraries on "
                                     "node {0}, {1}".format(node, stderr))

    @staticmethod
    def copy_odl_client(node, odl_name, src_path, dst_path):
        """Copy ODL Client from source path to destination path.

        :param node: Honeycomb node.
        :param odl_name: Name of ODL client version to use.
        :param src_path: Source Path where to find ODl client.
        :param dst_path: Destination path.
        :type node: dict
        :type odl_name: str
        :type src_path: str
        :type dst_path: str
        :raises HoneycombError: If the operation fails.
        """
        ssh = SSH()
        ssh.connect(node)

        cmd = "sudo rm -rf {dst}/*karaf_{odl_name} && " \
              "cp -r {src}/*karaf_{odl_name}* {dst}".format(
                  src=src_path, odl_name=odl_name, dst=dst_path)

        ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=120)
        if int(ret_code) != 0:
            raise HoneycombError(
                "Failed to copy ODL client on node {0}".format(node["host"]))

    @staticmethod
    def setup_odl_client(node, path):
        """Start ODL client on the specified node.

        Karaf should be located in the provided path, and VPP and Honeycomb
        should already be running, otherwise the start will fail.

        :param node: Node to start ODL client on.
        :param path: Path to ODL client on node.
        :type node: dict
        :type path: str
        :raises HoneycombError: If Honeycomb fails to start.
        """
        logger.console("\nStarting ODL client ...")

        ssh = SSH()
        ssh.connect(node)

        cmd = "{path}/*karaf*/bin/start clean".format(path=path)
        ret_code, _, _ = ssh.exec_command_sudo(cmd)

        if int(ret_code) != 0:
            raise HoneycombError('Node {0} failed to start ODL.'.
                                 format(node['host']))
        else:
            logger.info("Starting the ODL client on node {0} is "
                        "in progress ...".format(node['host']))

    @staticmethod
    def install_odl_features(node, path, *features):
        """Install required features on a running ODL client.

        :param node: Honeycomb node.
        :param path: Path to ODL client on node.
        :param features: Optional, list of additional features to install.
        :type node: dict
        :type path: str
        :type features: list
        :raises HoneycombError: If the feature installation fails.
        """
        ssh = SSH()
        ssh.connect(node)

        cmd = "{path}/*karaf*/bin/client -u karaf feature:install " \
              "odl-restconf-all " \
              "odl-netconf-connector-all " \
              "odl-netconf-topology".format(path=path)
        for feature in features:
            cmd += " {0}".format(feature)

        ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=250)

        if int(ret_code) != 0:
            raise HoneycombError("Feature install did not succeed.")

    @staticmethod
    def check_odl_startup_state(node):
        """Check the status of ODL client startup.

        :param node: Honeycomb node.
        :type node: dict
        :returns: True when ODL is started.
        :rtype: bool
        :raises HoneycombError: When the response is not code 200: OK.
        """
        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")
        expected_status_codes = (HTTPCodes.UNAUTHORIZED,
                                 HTTPCodes.FORBIDDEN,
                                 HTTPCodes.NOT_FOUND,
                                 HTTPCodes.SERVICE_UNAVAILABLE,
                                 HTTPCodes.INTERNAL_SERVER_ERROR)

        status_code, _ = HTTPRequest.get(node, path, timeout=10,
                                         enable_logging=False)
        if status_code == HTTPCodes.OK:
            logger.info("ODL client on node {0} is up and running".
                        format(node['host']))
        elif status_code in expected_status_codes:
            if status_code == HTTPCodes.UNAUTHORIZED:
                logger.info('Unauthorized. If this triggers keyword '
                            'timeout, verify username and password.')
            raise HoneycombError('ODL client on node {0} running but '
                                 'not yet ready.'.format(node['host']),
                                 enable_logging=False)
        else:
            raise HoneycombError('Unexpected return code: {0}.'.
                                 format(status_code))
        return True

    @staticmethod
    def check_odl_shutdown_state(node):
        """Check the status of ODL client shutdown.

        :param node: Honeycomb node.
        :type node: dict
        :returns: True when ODL is stopped.
        :rtype: bool
        :raises HoneycombError: When the response is not code 200: OK.
        """
        cmd = "pgrep -f karaf"
        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")

        try:
            HTTPRequest.get(node, path, timeout=10, enable_logging=False)
            raise HoneycombError("ODL client is still running.")
        except HTTPRequestError:
            # Connection refused means the HTTP endpoint is down; confirm
            # the process itself is gone.
            logger.debug("Connection refused, checking process state....")
            ssh = SSH()
            ssh.connect(node)
            ret_code, _, _ = ssh.exec_command(cmd)
            if ret_code == 0:
                raise HoneycombError("ODL client is still running.")

        return True

    @staticmethod
    def mount_honeycomb_on_odl(node):
        """Tell ODL client to mount Honeycomb instance over netconf.

        :param node: Honeycomb node.
        :type node: dict
        :raises HoneycombError: When the response is not code 200: OK.
        """
        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")

        url_file = "{0}/{1}".format(Const.RESOURCES_TPL_HC,
                                    "odl_client/mount_honeycomb.json")

        with open(url_file) as template:
            data = template.read()

        # NOTE(review): reconstructed — the payload is parsed with json.loads
        # and posted as JSON; verify keyword names against HTTPRequest.post.
        data = loads(data)

        status_code, _ = HTTPRequest.post(
            node,
            path,
            headers={"Content-Type": "application/json",
                     "Accept": "text/plain"},
            json=data,
            timeout=10,
            enable_logging=False)

        if status_code == HTTPCodes.OK:
            logger.info("ODL mount point configured successfully.")
        elif status_code == HTTPCodes.CONFLICT:
            logger.info("ODL mount point was already configured.")
        else:
            raise HoneycombError('Mount point configuration not successful')

    @staticmethod
    def stop_odl_client(node, path):
        """Stop ODL client service on the specified node.

        :param node: Node to start ODL client on.
        :param path: Path to ODL client.
        :type node: dict
        :type path: str
        :raises HoneycombError: If ODL client fails to stop.
        """
        ssh = SSH()
        ssh.connect(node)

        cmd = "{0}/*karaf*/bin/stop".format(path)

        ret_code, _, _ = ssh.exec_command_sudo(cmd)
        if int(ret_code) != 0:
            # Graceful stop failed; fall back to killing the process.
            logger.debug("ODL Client refused to shut down.")
            cmd = "pkill -f 'karaf'"
            (ret_code, _, _) = ssh.exec_command_sudo(cmd)
            if int(ret_code) != 0:
                raise HoneycombError('Node {0} failed to stop ODL.'.
                                     format(node['host']))

        logger.info("ODL client service stopped.")

    @staticmethod
    def stop_vpp_service(node):
        """Stop VPP service on the specified node.

        :param node: VPP node.
        :type node: dict
        :raises RuntimeError: If VPP fails to stop.
        """
        ssh = SSH()
        ssh.connect(node)
        cmd = "service vpp stop"
        ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=80)
        if int(ret_code) != 0:
            # Best-effort: log and continue rather than fail the teardown.
            logger.debug("VPP service refused to shut down.")
class HoneycombStartupConfig(object):
    """Generator for Honeycomb startup configuration.
    """

    def __init__(self):
        """Initialize Honeycomb startup config generator.

        The generated launcher script re-executes the Honeycomb jar in a
        loop while it exits with status 100 (Honeycomb's restart code).
        """
        self.template = """
        #!/bin/sh -
        STATUS=100

        while [ $STATUS -eq 100 ]
        do
          {java_call} -jar $(dirname $0)/{jar_filename}
          STATUS=$?
          echo "Honeycomb exited with status: $STATUS"
          if [ $STATUS -eq 100 ]
          then
            echo "Restarting..."
          fi
        done
        """
        self.java_call = "{scheduler} {affinity} java{jit_mode}{params}"

        # Fragments assembled into java_call by apply_config; each setter
        # below fills in one of these.
        self.scheduler = ""
        self.core_affinity = ""
        self.jit_mode = ""
        self.params = ""
        self.numa = ""

        self.config = ""
        self.ssh = SSH()

    def apply_config(self, node):
        """Generate configuration file /opt/honeycomb/honeycomb on the specified
        node.

        :param node: Honeycomb node.
        :type node: dict
        """
        self.ssh.connect(node)
        _, filename, _ = self.ssh.exec_command("ls /opt/honeycomb | grep .jar")

        java_call = self.java_call.format(scheduler=self.scheduler,
                                          affinity=self.core_affinity,
                                          jit_mode=self.jit_mode,
                                          params=self.params)
        self.config = self.template.format(java_call=java_call,
                                           jar_filename=filename)

        self.ssh.connect(node)
        cmd = "echo '{config}' > /tmp/honeycomb " \
              "&& chmod +x /tmp/honeycomb " \
              "&& sudo mv -f /tmp/honeycomb /opt/honeycomb".\
            format(config=self.config)
        self.ssh.exec_command(cmd)

    def set_cpu_scheduler(self, scheduler="FIFO"):
        """Use alternate CPU scheduler.

        Note: OTHER scheduler doesn't load-balance over isolcpus.

        :param scheduler: CPU scheduler to use.
        :type scheduler: str
        """
        schedulers = {"FIFO": "-f 99",  # First In, First Out
                      "RR": "-r 99",  # Round Robin
                      "OTHER": "-o",  # Ubuntu default
                     }
        self.scheduler = "chrt {0}".format(schedulers[scheduler])

    def set_cpu_core_affinity(self, low, high=None):
        """Set core affinity for the honeycomb process and subprocesses.

        :param low: Lowest core ID number.
        :param high: Highest core ID number. Leave empty to use a single core.
        :type low: int
        :type high: int
        """
        self.core_affinity = "taskset -c {low}-{high}".format(
            low=low, high=high if high else low)

    def set_jit_compiler_mode(self, jit_mode):
        """Set running mode for Java's JIT compiler.

        :param jit_mode: Desired JIT mode.
        :type jit_mode: str
        """
        modes = {"client": " -client",  # Default
                 "server": " -server",  # Higher performance but longer warmup
                 "classic": " -classic"  # Disables JIT compiler
                }
        self.jit_mode = modes[jit_mode]

    def set_memory_size(self, mem_min, mem_max=None):
        """Set minimum and maximum memory use for the JVM.

        :param mem_min: Minimum amount of memory (MB).
        :param mem_max: Maximum amount of memory (MB). Default is 4 times
            minimum value.
        :type mem_min: int
        :type mem_max: int
        """
        self.params += " -Xms{min}m -Xmx{max}m".format(
            min=mem_min, max=mem_max if mem_max else mem_min*4)

    def set_metaspace_size(self, mem_min, mem_max=None):
        """Set minimum and maximum memory used for class metadata in the JVM.

        :param mem_min: Minimum metaspace size (MB).
        :param mem_max: Maximum metaspace size (MB). Default is 4 times
            minimum value.
        :type mem_min: int
        :type mem_max: int
        """
        self.params += " -XX:MetaspaceSize={min}m " \
                       "-XX:MaxMetaspaceSize={max}m".format(
                           min=mem_min, max=mem_max if mem_max else mem_min*4)

    def set_numa_optimization(self):
        """Use optimization of memory use and garbage collection for NUMA
        architectures.
        """
        self.params += " -XX:+UseNUMA -XX:+UseParallelGC"

    def set_ssh_security_provider(self):
        """Disables BouncyCastle for SSHD."""
        # Workaround for issue described in:
        # https://wiki.fd.io/view/Honeycomb/Releases/1609/Honeycomb_and_ODL
        self.params += " -Dorg.apache.sshd.registerBouncyCastle=false"