1 # Copyright (c) 2017 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
6 # http://www.apache.org/licenses/LICENSE-2.0
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
14 """Implementation of keywords for Honeycomb setup."""
16 from ipaddress import IPv6Address, AddressValueError
18 from robot.api import logger
20 from resources.libraries.python.HTTPRequest import HTTPRequest, HTTPCodes, \
22 from resources.libraries.python.constants import Constants as Const
23 from resources.libraries.python.honeycomb.HoneycombUtil import HoneycombError
24 from resources.libraries.python.honeycomb.HoneycombUtil \
25 import HoneycombUtil as HcUtil
26 from resources.libraries.python.ssh import SSH
27 from resources.libraries.python.topology import NodeType
class HoneycombSetup(object):
    """Implements keywords for Honeycomb setup.

    The keywords implemented in this class make possible to:
    - start Honeycomb,
    - stop Honeycomb,
    - check the Honeycomb start-up state,
    - check the Honeycomb shutdown state,
    - add VPP to the topology.
    """
45 def start_honeycomb_on_duts(*nodes):
46 """Start Honeycomb on specified DUT nodes.
48 This keyword starts the Honeycomb service on specified DUTs.
49 The keyword just starts the Honeycomb and does not check its startup
50 state. Use the keyword "Check Honeycomb Startup State" to check if the
51 Honeycomb is up and running.
52 Honeycomb must be installed in "/opt" directory, otherwise the start
54 :param nodes: List of nodes to start Honeycomb on.
56 :raises HoneycombError: If Honeycomb fails to start.
59 HoneycombSetup.print_environment(nodes)
61 logger.console("\n(re)Starting Honeycomb service ...")
63 cmd = "sudo service honeycomb start"
66 if node['type'] == NodeType.DUT:
69 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
70 if int(ret_code) != 0:
71 raise HoneycombError('Node {0} failed to start Honeycomb.'.
74 logger.info("Starting the Honeycomb service on node {0} is "
75 "in progress ...".format(node['host']))
78 def stop_honeycomb_on_duts(*nodes):
79 """Stop the Honeycomb service on specified DUT nodes.
81 This keyword stops the Honeycomb service on specified nodes. It just
82 stops the Honeycomb and does not check its shutdown state. Use the
83 keyword "Check Honeycomb Shutdown State" to check if Honeycomb has
85 :param nodes: List of nodes to stop Honeycomb on.
87 :raises HoneycombError: If Honeycomb failed to stop.
89 logger.console("\nShutting down Honeycomb service ...")
91 cmd = "sudo service honeycomb stop"
95 if node['type'] == NodeType.DUT:
98 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
99 if int(ret_code) != 0:
100 errors.append(node['host'])
102 logger.info("Stopping the Honeycomb service on node {0} is "
103 "in progress ...".format(node['host']))
105 raise HoneycombError('Node(s) {0} failed to stop Honeycomb.'.
109 def restart_honeycomb_on_dut(node):
110 """Restart Honeycomb on specified DUT nodes.
112 This keyword restarts the Honeycomb service on specified DUTs. Use the
113 keyword "Check Honeycomb Startup State" to check if the Honeycomb is up
116 :param node: Node to restart Honeycomb on.
118 :raises HoneycombError: If Honeycomb fails to start.
121 logger.console("\n(re)Starting Honeycomb service ...")
123 cmd = "sudo service honeycomb restart"
127 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
128 if int(ret_code) != 0:
129 raise HoneycombError('Node {0} failed to restart Honeycomb.'.
130 format(node['host']))
133 "Honeycomb service restart is in progress on node {0}".format(
137 def check_honeycomb_startup_state(*nodes):
138 """Check state of Honeycomb service during startup on specified nodes.
140 Reads html path from template file oper_vpp_version.url.
142 Honeycomb nodes reply with connection refused or the following status
143 codes depending on startup progress: codes 200, 401, 403, 404, 500, 503
145 :param nodes: List of DUT nodes starting Honeycomb.
147 :return: True if all GETs returned code 200(OK).
150 path = HcUtil.read_path_from_url_file("oper_vpp_version")
151 expected_status_codes = (HTTPCodes.UNAUTHORIZED,
154 HTTPCodes.SERVICE_UNAVAILABLE,
155 HTTPCodes.INTERNAL_SERVER_ERROR)
158 if node['type'] == NodeType.DUT:
159 HoneycombSetup.print_ports(node)
161 status_code, _ = HTTPRequest.get(node, path,
162 enable_logging=False)
163 except HTTPRequestError:
166 ssh.exec_command("tail -n 100 /var/log/syslog")
168 if status_code == HTTPCodes.OK:
169 logger.info("Honeycomb on node {0} is up and running".
170 format(node['host']))
171 elif status_code in expected_status_codes:
172 if status_code == HTTPCodes.UNAUTHORIZED:
173 logger.info('Unauthorized. If this triggers keyword '
174 'timeout, verify Honeycomb username and '
176 raise HoneycombError('Honeycomb on node {0} running but '
177 'not yet ready.'.format(node['host']),
178 enable_logging=False)
180 raise HoneycombError('Unexpected return code: {0}.'.
183 status_code, _ = HcUtil.get_honeycomb_data(
184 node, "config_vpp_interfaces")
185 if status_code != HTTPCodes.OK:
186 raise HoneycombError('Honeycomb on node {0} running but '
187 'not yet ready.'.format(node['host']),
188 enable_logging=False)
192 def check_honeycomb_shutdown_state(*nodes):
193 """Check state of Honeycomb service during shutdown on specified nodes.
195 Honeycomb nodes reply with connection refused or the following status
196 codes depending on shutdown progress: codes 200, 404.
198 :param nodes: List of DUT nodes stopping Honeycomb.
200 :return: True if all GETs fail to connect.
203 cmd = "ps -ef | grep -v grep | grep honeycomb"
205 if node['type'] == NodeType.DUT:
207 status_code, _ = HTTPRequest.get(node, '/index.html',
208 enable_logging=False)
209 if status_code == HTTPCodes.OK:
210 raise HoneycombError('Honeycomb on node {0} is still '
211 'running.'.format(node['host']),
212 enable_logging=False)
213 elif status_code == HTTPCodes.NOT_FOUND:
214 raise HoneycombError('Honeycomb on node {0} is shutting'
215 ' down.'.format(node['host']),
216 enable_logging=False)
218 raise HoneycombError('Unexpected return code: {0}.'.
220 except HTTPRequestError:
221 logger.debug('Connection refused, checking the process '
225 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
227 raise HoneycombError('Honeycomb on node {0} is still '
228 'running.'.format(node['host']),
229 enable_logging=False)
231 logger.info("Honeycomb on node {0} has stopped".
232 format(node['host']))
236 def configure_restconf_binding_address(node):
237 """Configure Honeycomb to accept restconf requests from all IP
238 addresses. IP version is determined by node data.
240 :param node: Information about a DUT node.
242 :raises HoneycombError: If the configuration could not be changed.
245 find = "restconf-binding-address"
247 IPv6Address(unicode(node["host"]))
248 # if management IP of the node is in IPv6 format
249 replace = '\\"restconf-binding-address\\": \\"0::0\\",'
250 except (AttributeError, AddressValueError):
251 replace = '\\"restconf-binding-address\\": \\"0.0.0.0\\",'
253 argument = '"/{0}/c\\ {1}"'.format(find, replace)
254 path = "{0}/config/honeycomb.json".format(Const.REMOTE_HC_DIR)
255 command = "sed -i {0} {1}".format(argument, path)
259 (ret_code, _, stderr) = ssh.exec_command_sudo(command)
261 raise HoneycombError("Failed to modify configuration on "
262 "node {0}, {1}".format(node, stderr))
265 def configure_jvpp_timeout(node, timeout=10):
266 """Configure timeout value for Java API commands Honeycomb sends to VPP.
268 :param node: Information about a DUT node.
269 :param timeout: Timeout value in seconds.
272 :raises HoneycombError: If the configuration could not be changed.
275 find = "jvpp-request-timeout"
276 replace = '\\"jvpp-request-timeout\\": {0}'.format(timeout)
278 argument = '"/{0}/c\\ {1}"'.format(find, replace)
279 path = "{0}/config/jvpp.json".format(Const.REMOTE_HC_DIR)
280 command = "sed -i {0} {1}".format(argument, path)
284 (ret_code, _, stderr) = ssh.exec_command_sudo(command)
286 raise HoneycombError("Failed to modify configuration on "
287 "node {0}, {1}".format(node, stderr))
290 def print_environment(nodes):
291 """Print information about the nodes to log. The information is defined
292 by commands in cmds tuple at the beginning of this method.
294 :param nodes: List of DUT nodes to get information about.
298 # TODO: When everything is set and running in VIRL env, transform this
299 # method to a keyword checking the environment.
307 "dpkg --list | grep openjdk",
308 "ls -la /opt/honeycomb")
311 if node['type'] == NodeType.DUT:
312 logger.info("Checking node {} ...".format(node['host']))
314 logger.info("Command: {}".format(cmd))
317 ssh.exec_command_sudo(cmd)
320 def print_ports(node):
321 """Uses "sudo netstat -anp | grep java" to print port where a java
324 :param node: Honeycomb node where we want to print the ports.
328 cmds = ("netstat -anp | grep java",
329 "ps -ef | grep [h]oneycomb")
331 logger.info("Checking node {} ...".format(node['host']))
333 logger.info("Command: {}".format(cmd))
336 ssh.exec_command_sudo(cmd)
339 def configure_log_level(node, level):
340 """Set Honeycomb logging to the specified level.
342 :param node: Honeycomb node.
343 :param level: Log level (INFO, DEBUG, TRACE).
348 find = 'logger name=\\"io.fd\\"'
349 replace = '<logger name=\\"io.fd\\" level=\\"{0}\\"/>'.format(level)
351 argument = '"/{0}/c\\ {1}"'.format(find, replace)
352 path = "{0}/config/logback.xml".format(Const.REMOTE_HC_DIR)
353 command = "sed -i {0} {1}".format(argument, path)
357 (ret_code, _, stderr) = ssh.exec_command_sudo(command)
359 raise HoneycombError("Failed to modify configuration on "
360 "node {0}, {1}".format(node, stderr))
363 def manage_honeycomb_features(node, feature, disable=False):
364 """Configure Honeycomb to use features that are disabled by default, or
365 disable previously enabled features.
367 ..Note:: If the module is not enabled in VPP, Honeycomb will
368 be unable to establish VPP connection.
370 :param node: Honeycomb node.
371 :param feature: Feature to enable.
372 :param disable: Disable the specified feature instead of enabling it.
374 :type feature: string
376 :raises HoneycombError: If the configuration could not be changed.
379 disabled_features = {
380 "NSH": "io.fd.hc2vpp.vppnsh.impl.VppNshModule"
386 if feature in disabled_features.keys():
387 # uncomment by replacing the entire line
388 find = replace = "{0}".format(disabled_features[feature])
390 replace = "// {0}".format(find)
392 argument = '"/{0}/c\\ {1}"'.format(find, replace)
393 path = "{0}/modules/*module-config"\
394 .format(Const.REMOTE_HC_DIR)
395 command = "sed -i {0} {1}".format(argument, path)
397 (ret_code, _, stderr) = ssh.exec_command_sudo(command)
399 raise HoneycombError("Failed to modify configuration on "
400 "node {0}, {1}".format(node, stderr))
402 raise HoneycombError(
403 "Unrecognized feature {0}.".format(feature))
406 def copy_java_libraries(node):
407 """Copy Java libraries installed by vpp-api-java package to honeycomb
410 This is a (temporary?) workaround for jvpp version mismatches.
412 :param node: Honeycomb node
418 (_, stdout, _) = ssh.exec_command_sudo(
419 "ls /usr/share/java | grep ^jvpp-*")
421 files = stdout.split("\n")[:-1]
424 # jvpp-registry-17.04.jar
425 # jvpp-core-17.04.jar
427 parts = item.split("-")
428 version = "{0}-SNAPSHOT".format(parts[2][:5])
429 artifact_id = "{0}-{1}".format(parts[0], parts[1])
431 directory = "{0}/lib/io/fd/vpp/{1}/{2}".format(
432 Const.REMOTE_HC_DIR, artifact_id, version)
433 cmd = "sudo mkdir -p {0}; " \
434 "sudo cp /usr/share/java/{1} {0}/{2}-{3}.jar".format(
435 directory, item, artifact_id, version)
437 (ret_code, _, stderr) = ssh.exec_command(cmd)
439 raise HoneycombError("Failed to copy JVPP libraries on "
440 "node {0}, {1}".format(node, stderr))
443 def copy_odl_client(node, odl_name, src_path, dst_path):
444 """Copy ODL Client from source path to destination path.
446 :param node: Honeycomb node.
447 :param odl_name: Name of ODL client version to use.
448 :param src_path: Source Path where to find ODl client.
449 :param dst_path: Destination path.
454 :raises HoneycombError: If the operation fails.
460 cmd = "cp -r {src}/*karaf_{odl_name}* {dst}".format(
461 src=src_path, odl_name=odl_name, dst=dst_path)
463 ret_code, _, _ = ssh.exec_command(cmd, timeout=60)
464 if int(ret_code) != 0:
465 raise HoneycombError(
466 "Failed to copy ODL client on node {0}".format(node["host"]))
469 def setup_odl_client(node, path):
470 """Start ODL client on the specified node.
472 Karaf should be located in the provided path, and VPP and Honeycomb
473 should already be running, otherwise the start will fail.
474 :param node: Node to start ODL client on.
475 :param path: Path to ODL client on node.
478 :raises HoneycombError: If Honeycomb fails to start.
481 logger.console("\nStarting ODL client ...")
485 cmd = "{path}/*karaf*/bin/start clean".format(path=path)
486 ret_code, _, _ = ssh.exec_command_sudo(cmd)
488 if int(ret_code) != 0:
489 raise HoneycombError('Node {0} failed to start ODL.'.
490 format(node['host']))
492 logger.info("Starting the ODL client on node {0} is "
493 "in progress ...".format(node['host']))
496 def install_odl_features(node, path, *features):
497 """Install required features on a running ODL client.
499 :param node: Honeycomb node.
500 :param path: Path to ODL client on node.
501 :param features: Optional, list of additional features to install.
510 cmd = "{path}/*karaf*/bin/client -u karaf feature:install " \
511 "odl-restconf-all odl-netconf-connector-all".format(path=path)
512 for feature in features:
513 cmd += " {0}".format(feature)
515 ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=120)
517 if int(ret_code) != 0:
518 raise HoneycombError("Feature install did not succeed.")
521 def check_odl_startup_state(node):
522 """Check the status of ODL client startup.
524 :param node: Honeycomb node.
526 :returns: True when ODL is started.
528 :raises HoneycombError: When the response is not code 200: OK.
531 path = HcUtil.read_path_from_url_file(
532 "odl_client/odl_netconf_connector")
533 expected_status_codes = (HTTPCodes.UNAUTHORIZED,
536 HTTPCodes.SERVICE_UNAVAILABLE,
537 HTTPCodes.INTERNAL_SERVER_ERROR)
539 status_code, _ = HTTPRequest.get(node, path, timeout=10,
540 enable_logging=False)
541 if status_code == HTTPCodes.OK:
542 logger.info("ODL client on node {0} is up and running".
543 format(node['host']))
544 elif status_code in expected_status_codes:
545 if status_code == HTTPCodes.UNAUTHORIZED:
546 logger.info('Unauthorized. If this triggers keyword '
547 'timeout, verify username and password.')
548 raise HoneycombError('ODL client on node {0} running but '
549 'not yet ready.'.format(node['host']),
550 enable_logging=False)
552 raise HoneycombError('Unexpected return code: {0}.'.
557 def mount_honeycomb_on_odl(node):
558 """Tell ODL client to mount Honeycomb instance over netconf.
560 :param node: Honeycomb node.
562 :raises HoneycombError: When the response is not code 200: OK.
565 path = HcUtil.read_path_from_url_file(
566 "odl_client/odl_netconf_connector")
568 url_file = "{0}/{1}".format(Const.RESOURCES_TPL_HC,
569 "odl_client/mount_honeycomb.xml")
571 with open(url_file) as template:
572 data = template.read()
574 status_code, _ = HTTPRequest.post(
575 node, path, headers={"Content-Type": "application/xml"},
576 payload=data, timeout=10, enable_logging=False)
578 if status_code == HTTPCodes.OK:
579 logger.info("ODL mount point configured successfully.")
580 elif status_code == HTTPCodes.CONFLICT:
581 logger.info("ODL mount point was already configured.")
583 raise HoneycombError('Mount point configuration not successful')
586 def stop_odl_client(node, path):
587 """Stop ODL client service on the specified node.
589 :param node: Node to start ODL client on.
590 :param path: Path to ODL client.
593 :raises HoneycombError: If ODL client fails to stop.
599 cmd = "{0}/*karaf*/bin/stop".format(path)
603 ret_code, _, _ = ssh.exec_command_sudo(cmd)
604 if int(ret_code) != 0:
605 logger.debug("ODL Client refused to shut down.")
606 cmd = "pkill -f 'karaf'"
607 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
608 if int(ret_code) != 0:
609 raise HoneycombError('Node {0} failed to stop ODL.'.
610 format(node['host']))
612 logger.info("ODL client service stopped.")
615 def stop_vpp_service(node):
616 """Stop VPP service on the specified node.
618 :param node: VPP node.
620 :raises RuntimeError: If VPP fails to stop.
625 cmd = "service vpp stop"
626 ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=80)
627 if int(ret_code) != 0:
628 raise RuntimeError("Could not stop VPP service on node {0}".format(
class HoneycombStartupConfig(object):
    """Generator for Honeycomb startup configuration.
    """

    def __init__(self):
        """Initialize Honeycomb startup config."""
        # Shell wrapper template written to /opt/honeycomb/honeycomb.
        # Exit status 100 means "restart requested", so loop while it repeats.
        # NOTE(review): reconstructed from a truncated source -- verify the
        # exact template text against repository history.
        self.template = """
        #!/bin/sh -
        STATUS=100

        while [ $STATUS -eq 100 ]
        do
          {java_call} -jar $(dirname $0)/{jar_filename}
          STATUS=$?
          echo "Honeycomb exited with status: $STATUS"
          if [ $STATUS -eq 100 ]
          then
            echo "Restarting..."
          fi
        done
        """
        # Java invocation with optional scheduler/affinity/JIT/JVM params.
        self.java_call = "{scheduler} {affinity} java {jit_mode} {params}"

        self.scheduler = ""
        self.core_affinity = ""
        self.jit_mode = ""
        self.params = ""
        self.numa = ""

        self.config = ""
        self.ssh = SSH()

    def apply_config(self, node):
        """Generate configuration file /opt/honeycomb/honeycomb on the specified
        node.

        :param node: Honeycomb node.
        :type node: dict
        """
        self.ssh.connect(node)
        _, filename, _ = self.ssh.exec_command("ls /opt/honeycomb | grep .jar")

        java_call = self.java_call.format(scheduler=self.scheduler,
                                          affinity=self.core_affinity,
                                          jit_mode=self.jit_mode,
                                          params=self.params)
        self.config = self.template.format(java_call=java_call,
                                           jar_filename=filename)

        self.ssh.connect(node)
        # Write to /tmp first, then move into place with sudo.
        cmd = "echo '{config}' > /tmp/honeycomb " \
              "&& chmod +x /tmp/honeycomb " \
              "&& sudo mv -f /tmp/honeycomb /opt/honeycomb".format(
                  config=self.config)
        self.ssh.exec_command(cmd)

    def set_cpu_scheduler(self, scheduler="FIFO"):
        """Use alternate CPU scheduler.

        Note: OTHER scheduler doesn't load-balance over isolcpus.

        :param scheduler: CPU scheduler to use.
        :type scheduler: str
        """
        schedulers = {"FIFO": "-f 99",  # First In, First Out
                      "RR": "-r 99",  # Round Robin
                      "OTHER": "-o",  # Ubuntu default
                     }
        self.scheduler = "chrt {0}".format(schedulers[scheduler])

    def set_cpu_core_affinity(self, low, high=None):
        """Set core affinity for the honeycomb process and subprocesses.

        :param low: Lowest core ID number.
        :param high: Highest core ID number. Leave empty to use a single core.
        :type low: int
        :type high: int
        """
        self.core_affinity = "taskset -c {low}-{high}".format(
            low=low, high=high if high else low)

    def set_jit_compiler_mode(self, jit_mode):
        """Set running mode for Java's JIT compiler.

        :param jit_mode: Desired JIT mode.
        :type jit_mode: str
        """
        modes = {"client": "-client",  # Default
                 "server": "-server",  # Higher performance but longer warmup
                 "classic": "-classic"  # Disables JIT compiler
                }

        self.jit_mode = modes[jit_mode]

    def set_memory_size(self, mem_min, mem_max=None):
        """Set minimum and maximum memory use for the JVM.

        :param mem_min: Minimum amount of memory (MB).
        :param mem_max: Maximum amount of memory (MB). Default is 4 times
            minimum value.
        :type mem_min: int
        :type mem_max: int
        """
        self.params += " -Xms{min}m -Xmx{max}m".format(
            min=mem_min, max=mem_max if mem_max else mem_min*4)

    def set_metaspace_size(self, mem_min, mem_max=None):
        """Set minimum and maximum memory used for class metadata in the JVM.

        :param mem_min: Minimum metaspace size (MB).
        :param mem_max: Maximum metaspace size (MB). Default is 4 times
            minimum value.
        :type mem_min: int
        :type mem_max: int
        """
        self.params += " -XX:MetaspaceSize={min}m " \
                       "-XX:MaxMetaspaceSize={max}m".format(
                           min=mem_min, max=mem_max if mem_max else mem_min*4)

    def set_numa_optimization(self):
        """Use optimization of memory use and garbage collection for NUMA
        architectures.
        """
        self.params += " -XX:+UseNUMA -XX:+UseParallelGC"