1 # Copyright (c) 2017 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
6 # http://www.apache.org/licenses/LICENSE-2.0
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
14 """Implementation of keywords for Honeycomb setup."""
from ipaddress import IPv6Address, AddressValueError

from robot.api import logger

from resources.libraries.python.constants import Constants as Const
from resources.libraries.python.HTTPRequest import HTTPRequest, HTTPCodes, \
    HTTPRequestError
from resources.libraries.python.honeycomb.HoneycombUtil import HoneycombError
from resources.libraries.python.honeycomb.HoneycombUtil \
    import HoneycombUtil as HcUtil
from resources.libraries.python.ssh import SSH
from resources.libraries.python.topology import NodeType
class HoneycombSetup(object):
    """Implements keywords for Honeycomb setup.

    The keywords implemented in this class make possible to:
    - start Honeycomb,
    - stop Honeycomb,
    - check the Honeycomb start-up state,
    - check the Honeycomb shutdown state,
    - add VPP to the topology.
    """
45 def start_honeycomb_on_duts(*nodes):
46 """Start Honeycomb on specified DUT nodes.
48 This keyword starts the Honeycomb service on specified DUTs.
49 The keyword just starts the Honeycomb and does not check its startup
50 state. Use the keyword "Check Honeycomb Startup State" to check if the
51 Honeycomb is up and running.
52 Honeycomb must be installed in "/opt" directory, otherwise the start
54 :param nodes: List of nodes to start Honeycomb on.
56 :raises HoneycombError: If Honeycomb fails to start.
59 HoneycombSetup.print_environment(nodes)
61 logger.console("\n(re)Starting Honeycomb service ...")
63 cmd = "sudo service honeycomb start"
66 if node['type'] == NodeType.DUT:
69 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
70 if int(ret_code) != 0:
71 raise HoneycombError('Node {0} failed to start Honeycomb.'.
74 logger.info("Starting the Honeycomb service on node {0} is "
75 "in progress ...".format(node['host']))
78 def stop_honeycomb_on_duts(*nodes):
79 """Stop the Honeycomb service on specified DUT nodes.
81 This keyword stops the Honeycomb service on specified nodes. It just
82 stops the Honeycomb and does not check its shutdown state. Use the
83 keyword "Check Honeycomb Shutdown State" to check if Honeycomb has
85 :param nodes: List of nodes to stop Honeycomb on.
87 :raises HoneycombError: If Honeycomb failed to stop.
89 logger.console("\nShutting down Honeycomb service ...")
91 cmd = "sudo service honeycomb stop"
95 if node['type'] == NodeType.DUT:
98 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
99 if int(ret_code) != 0:
100 errors.append(node['host'])
102 logger.info("Stopping the Honeycomb service on node {0} is "
103 "in progress ...".format(node['host']))
105 raise HoneycombError('Node(s) {0} failed to stop Honeycomb.'.
109 def restart_honeycomb_on_dut(node):
110 """Restart Honeycomb on specified DUT nodes.
112 This keyword restarts the Honeycomb service on specified DUTs. Use the
113 keyword "Check Honeycomb Startup State" to check if the Honeycomb is up
116 :param node: Node to restart Honeycomb on.
118 :raises HoneycombError: If Honeycomb fails to start.
121 logger.console("\n(re)Starting Honeycomb service ...")
123 cmd = "sudo service honeycomb restart"
127 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
128 if int(ret_code) != 0:
129 raise HoneycombError('Node {0} failed to restart Honeycomb.'.
130 format(node['host']))
133 "Honeycomb service restart is in progress on node {0}".format(
137 def check_honeycomb_startup_state(*nodes):
138 """Check state of Honeycomb service during startup on specified nodes.
140 Reads html path from template file oper_vpp_version.url.
142 Honeycomb nodes reply with connection refused or the following status
143 codes depending on startup progress: codes 200, 401, 403, 404, 500, 503
145 :param nodes: List of DUT nodes starting Honeycomb.
147 :return: True if all GETs returned code 200(OK).
150 path = HcUtil.read_path_from_url_file("oper_vpp_version")
151 expected_status_codes = (HTTPCodes.UNAUTHORIZED,
154 HTTPCodes.SERVICE_UNAVAILABLE,
155 HTTPCodes.INTERNAL_SERVER_ERROR)
158 if node['type'] == NodeType.DUT:
159 HoneycombSetup.print_ports(node)
161 status_code, _ = HTTPRequest.get(node, path,
162 enable_logging=False)
163 except HTTPRequestError:
166 ret_code, _, _ = ssh.exec_command_sudo(
167 "tail -n 100 /var/log/syslog")
169 # It's probably Centos
170 ssh.exec_command_sudo("tail -n 100 /var/log/messages")
172 if status_code == HTTPCodes.OK:
173 logger.info("Honeycomb on node {0} is up and running".
174 format(node['host']))
175 elif status_code in expected_status_codes:
176 if status_code == HTTPCodes.UNAUTHORIZED:
177 logger.info('Unauthorized. If this triggers keyword '
178 'timeout, verify Honeycomb username and '
180 raise HoneycombError('Honeycomb on node {0} running but '
181 'not yet ready.'.format(node['host']),
182 enable_logging=False)
184 raise HoneycombError('Unexpected return code: {0}.'.
187 status_code, _ = HcUtil.get_honeycomb_data(
188 node, "config_vpp_interfaces")
189 if status_code != HTTPCodes.OK:
190 raise HoneycombError('Honeycomb on node {0} running but '
191 'not yet ready.'.format(node['host']),
192 enable_logging=False)
196 def check_honeycomb_shutdown_state(*nodes):
197 """Check state of Honeycomb service during shutdown on specified nodes.
199 Honeycomb nodes reply with connection refused or the following status
200 codes depending on shutdown progress: codes 200, 404.
202 :param nodes: List of DUT nodes stopping Honeycomb.
204 :return: True if all GETs fail to connect.
207 cmd = "ps -ef | grep -v grep | grep honeycomb"
209 if node['type'] == NodeType.DUT:
211 status_code, _ = HTTPRequest.get(node, '/index.html',
212 enable_logging=False)
213 if status_code == HTTPCodes.OK:
214 raise HoneycombError('Honeycomb on node {0} is still '
215 'running.'.format(node['host']),
216 enable_logging=False)
217 elif status_code == HTTPCodes.NOT_FOUND:
218 raise HoneycombError('Honeycomb on node {0} is shutting'
219 ' down.'.format(node['host']),
220 enable_logging=False)
222 raise HoneycombError('Unexpected return code: {0}.'.
224 except HTTPRequestError:
225 logger.debug('Connection refused, checking the process '
229 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
231 raise HoneycombError('Honeycomb on node {0} is still '
232 'running.'.format(node['host']),
233 enable_logging=False)
235 logger.info("Honeycomb on node {0} has stopped".
236 format(node['host']))
240 def configure_restconf_binding_address(node):
241 """Configure Honeycomb to accept restconf requests from all IP
242 addresses. IP version is determined by node data.
244 :param node: Information about a DUT node.
246 :raises HoneycombError: If the configuration could not be changed.
249 find = "restconf-binding-address"
251 IPv6Address(unicode(node["host"]))
252 # if management IP of the node is in IPv6 format
253 replace = '\\"restconf-binding-address\\": \\"0::0\\",'
254 except (AttributeError, AddressValueError):
255 replace = '\\"restconf-binding-address\\": \\"0.0.0.0\\",'
257 argument = '"/{0}/c\\ {1}"'.format(find, replace)
258 path = "{0}/config/honeycomb.json".format(Const.REMOTE_HC_DIR)
259 command = "sed -i {0} {1}".format(argument, path)
263 (ret_code, _, stderr) = ssh.exec_command_sudo(command)
265 raise HoneycombError("Failed to modify configuration on "
266 "node {0}, {1}".format(node, stderr))
269 def configure_jvpp_timeout(node, timeout=10):
270 """Configure timeout value for Java API commands Honeycomb sends to VPP.
272 :param node: Information about a DUT node.
273 :param timeout: Timeout value in seconds.
276 :raises HoneycombError: If the configuration could not be changed.
279 find = "jvpp-request-timeout"
280 replace = '\\"jvpp-request-timeout\\": {0}'.format(timeout)
282 argument = '"/{0}/c\\ {1}"'.format(find, replace)
283 path = "{0}/config/jvpp.json".format(Const.REMOTE_HC_DIR)
284 command = "sed -i {0} {1}".format(argument, path)
288 (ret_code, _, stderr) = ssh.exec_command_sudo(command)
290 raise HoneycombError("Failed to modify configuration on "
291 "node {0}, {1}".format(node, stderr))
294 def print_environment(nodes):
295 """Print information about the nodes to log. The information is defined
296 by commands in cmds tuple at the beginning of this method.
298 :param nodes: List of DUT nodes to get information about.
302 # TODO: When everything is set and running in VIRL env, transform this
303 # method to a keyword checking the environment.
311 "dpkg --list | grep openjdk",
312 "ls -la /opt/honeycomb")
315 if node['type'] == NodeType.DUT:
316 logger.info("Checking node {} ...".format(node['host']))
318 logger.info("Command: {}".format(cmd))
321 ssh.exec_command_sudo(cmd)
324 def print_ports(node):
325 """Uses "sudo netstat -anp | grep java" to print port where a java
328 :param node: Honeycomb node where we want to print the ports.
332 cmds = ("netstat -anp | grep java",
333 "ps -ef | grep [h]oneycomb")
335 logger.info("Checking node {} ...".format(node['host']))
337 logger.info("Command: {}".format(cmd))
340 ssh.exec_command_sudo(cmd)
343 def configure_log_level(node, level):
344 """Set Honeycomb logging to the specified level.
346 :param node: Honeycomb node.
347 :param level: Log level (INFO, DEBUG, TRACE).
352 find = 'logger name=\\"io.fd\\"'
353 replace = '<logger name=\\"io.fd\\" level=\\"{0}\\"/>'.format(level)
355 argument = '"/{0}/c\\ {1}"'.format(find, replace)
356 path = "{0}/config/logback.xml".format(Const.REMOTE_HC_DIR)
357 command = "sed -i {0} {1}".format(argument, path)
361 (ret_code, _, stderr) = ssh.exec_command_sudo(command)
363 raise HoneycombError("Failed to modify configuration on "
364 "node {0}, {1}".format(node, stderr))
367 def manage_honeycomb_features(node, feature, disable=False):
368 """Configure Honeycomb to use features that are disabled by default, or
369 disable previously enabled features.
371 ..Note:: If the module is not enabled in VPP, Honeycomb will
372 be unable to establish VPP connection.
374 :param node: Honeycomb node.
375 :param feature: Feature to enable.
376 :param disable: Disable the specified feature instead of enabling it.
378 :type feature: string
380 :raises HoneycombError: If the configuration could not be changed.
383 disabled_features = {
384 "NSH": "io.fd.hc2vpp.vppnsh.impl.VppNshModule"
390 if feature in disabled_features.keys():
391 # uncomment by replacing the entire line
392 find = replace = "{0}".format(disabled_features[feature])
394 replace = "// {0}".format(find)
396 argument = '"/{0}/c\\ {1}"'.format(find, replace)
397 path = "{0}/modules/*module-config"\
398 .format(Const.REMOTE_HC_DIR)
399 command = "sed -i {0} {1}".format(argument, path)
401 (ret_code, _, stderr) = ssh.exec_command_sudo(command)
403 raise HoneycombError("Failed to modify configuration on "
404 "node {0}, {1}".format(node, stderr))
406 raise HoneycombError(
407 "Unrecognized feature {0}.".format(feature))
410 def copy_java_libraries(node):
411 """Copy Java libraries installed by vpp-api-java package to honeycomb
414 This is a (temporary?) workaround for jvpp version mismatches.
416 :param node: Honeycomb node
422 (_, stdout, _) = ssh.exec_command_sudo(
423 "ls /usr/share/java | grep ^jvpp-*")
425 files = stdout.split("\n")[:-1]
428 # jvpp-registry-17.04.jar
429 # jvpp-core-17.04.jar
431 parts = item.split("-")
432 version = "{0}-SNAPSHOT".format(parts[2][:5])
433 artifact_id = "{0}-{1}".format(parts[0], parts[1])
435 directory = "{0}/lib/io/fd/vpp/{1}/{2}".format(
436 Const.REMOTE_HC_DIR, artifact_id, version)
437 cmd = "sudo mkdir -p {0}; " \
438 "sudo cp /usr/share/java/{1} {0}/{2}-{3}.jar".format(
439 directory, item, artifact_id, version)
441 (ret_code, _, stderr) = ssh.exec_command(cmd)
443 raise HoneycombError("Failed to copy JVPP libraries on "
444 "node {0}, {1}".format(node, stderr))
447 def copy_odl_client(node, odl_name, src_path, dst_path):
448 """Copy ODL Client from source path to destination path.
450 :param node: Honeycomb node.
451 :param odl_name: Name of ODL client version to use.
452 :param src_path: Source Path where to find ODl client.
453 :param dst_path: Destination path.
458 :raises HoneycombError: If the operation fails.
464 cmd = "cp -r {src}/*karaf_{odl_name}* {dst}".format(
465 src=src_path, odl_name=odl_name, dst=dst_path)
467 ret_code, _, _ = ssh.exec_command(cmd, timeout=60)
468 if int(ret_code) != 0:
469 raise HoneycombError(
470 "Failed to copy ODL client on node {0}".format(node["host"]))
473 def setup_odl_client(node, path):
474 """Start ODL client on the specified node.
476 Karaf should be located in the provided path, and VPP and Honeycomb
477 should already be running, otherwise the start will fail.
478 :param node: Node to start ODL client on.
479 :param path: Path to ODL client on node.
482 :raises HoneycombError: If Honeycomb fails to start.
485 logger.console("\nStarting ODL client ...")
489 cmd = "{path}/*karaf*/bin/start clean".format(path=path)
490 ret_code, _, _ = ssh.exec_command_sudo(cmd)
492 if int(ret_code) != 0:
493 raise HoneycombError('Node {0} failed to start ODL.'.
494 format(node['host']))
496 logger.info("Starting the ODL client on node {0} is "
497 "in progress ...".format(node['host']))
500 def install_odl_features(node, path, *features):
501 """Install required features on a running ODL client.
503 :param node: Honeycomb node.
504 :param path: Path to ODL client on node.
505 :param features: Optional, list of additional features to install.
514 cmd = "{path}/*karaf*/bin/client -u karaf feature:install " \
515 "odl-restconf-all odl-netconf-connector-all".format(path=path)
516 for feature in features:
517 cmd += " {0}".format(feature)
519 ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=120)
521 if int(ret_code) != 0:
522 raise HoneycombError("Feature install did not succeed.")
525 def check_odl_startup_state(node):
526 """Check the status of ODL client startup.
528 :param node: Honeycomb node.
530 :returns: True when ODL is started.
532 :raises HoneycombError: When the response is not code 200: OK.
535 path = HcUtil.read_path_from_url_file(
536 "odl_client/odl_netconf_connector")
537 expected_status_codes = (HTTPCodes.UNAUTHORIZED,
540 HTTPCodes.SERVICE_UNAVAILABLE,
541 HTTPCodes.INTERNAL_SERVER_ERROR)
543 status_code, _ = HTTPRequest.get(node, path, timeout=10,
544 enable_logging=False)
545 if status_code == HTTPCodes.OK:
546 logger.info("ODL client on node {0} is up and running".
547 format(node['host']))
548 elif status_code in expected_status_codes:
549 if status_code == HTTPCodes.UNAUTHORIZED:
550 logger.info('Unauthorized. If this triggers keyword '
551 'timeout, verify username and password.')
552 raise HoneycombError('ODL client on node {0} running but '
553 'not yet ready.'.format(node['host']),
554 enable_logging=False)
556 raise HoneycombError('Unexpected return code: {0}.'.
561 def check_odl_shutdown_state(node):
562 """Check the status of ODL client shutdown.
564 :param node: Honeycomb node.
566 :returns: True when ODL is stopped.
568 :raises HoneycombError: When the response is not code 200: OK.
571 cmd = "pgrep -f karaf"
572 path = HcUtil.read_path_from_url_file(
573 "odl_client/odl_netconf_connector")
576 status_code, _ = HTTPRequest.get(node, path, timeout=10,
577 enable_logging=False)
578 raise HoneycombError("ODL client is still running.")
579 except HTTPRequestError:
580 logger.debug("Connection refused, checking process state....")
583 ret_code, _, _ = ssh.exec_command(cmd)
585 raise HoneycombError("ODL client is still running.")
590 def mount_honeycomb_on_odl(node):
591 """Tell ODL client to mount Honeycomb instance over netconf.
593 :param node: Honeycomb node.
595 :raises HoneycombError: When the response is not code 200: OK.
598 path = HcUtil.read_path_from_url_file(
599 "odl_client/odl_netconf_connector")
601 url_file = "{0}/{1}".format(Const.RESOURCES_TPL_HC,
602 "odl_client/mount_honeycomb.xml")
604 with open(url_file) as template:
605 data = template.read()
607 status_code, _ = HTTPRequest.post(
608 node, path, headers={"Content-Type": "application/xml"},
609 payload=data, timeout=10, enable_logging=False)
611 if status_code == HTTPCodes.OK:
612 logger.info("ODL mount point configured successfully.")
613 elif status_code == HTTPCodes.CONFLICT:
614 logger.info("ODL mount point was already configured.")
616 raise HoneycombError('Mount point configuration not successful')
619 def stop_odl_client(node, path):
620 """Stop ODL client service on the specified node.
622 :param node: Node to start ODL client on.
623 :param path: Path to ODL client.
626 :raises HoneycombError: If ODL client fails to stop.
632 cmd = "{0}/*karaf*/bin/stop".format(path)
636 ret_code, _, _ = ssh.exec_command_sudo(cmd)
637 if int(ret_code) != 0:
638 logger.debug("ODL Client refused to shut down.")
639 cmd = "pkill -f 'karaf'"
640 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
641 if int(ret_code) != 0:
642 raise HoneycombError('Node {0} failed to stop ODL.'.
643 format(node['host']))
645 logger.info("ODL client service stopped.")
648 def stop_vpp_service(node):
649 """Stop VPP service on the specified node.
651 :param node: VPP node.
653 :raises RuntimeError: If VPP fails to stop.
658 cmd = "service vpp stop"
659 ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=80)
660 if int(ret_code) != 0:
661 logger.debug("VPP service refused to shut down.")
class HoneycombStartupConfig(object):
    """Generator for Honeycomb startup configuration."""

    def __init__(self):
        """Initialize Honeycomb startup config generator.

        The shell-script template restarts Honeycomb in a loop for as long
        as it exits with status 100 (Honeycomb's "restart me" exit code).
        """
        self.template = """
        #!/bin/sh -
        STATUS=100

        while [ $STATUS -eq 100 ]
        do
          {java_call} -jar $(dirname $0)/{jar_filename}
          STATUS=$?
          echo "Honeycomb exited with status: $STATUS"
          if [ $STATUS -eq 100 ]
          then
            echo "Restarting..."
          fi
        done
        """
        # Template for the java invocation; filled in by apply_config.
        self.java_call = "{scheduler} {affinity} java {jit_mode} {params}"

        self.scheduler = ""
        self.core_affinity = ""
        self.jit_mode = ""
        self.params = ""
        self.numa = ""

        self.config = ""
        self.ssh = None
696 def apply_config(self, node):
697 """Generate configuration file /opt/honeycomb/honeycomb on the specified
700 :param node: Honeycomb node.
704 self.ssh.connect(node)
705 _, filename, _ = self.ssh.exec_command("ls /opt/honeycomb | grep .jar")
707 java_call = self.java_call.format(scheduler=self.scheduler,
708 affinity=self.core_affinity,
709 jit_mode=self.jit_mode,
711 self.config = self.template.format(java_call=java_call,
712 jar_filename=filename)
714 self.ssh.connect(node)
715 cmd = "echo '{config}' > /tmp/honeycomb " \
716 "&& chmod +x /tmp/honeycomb " \
717 "&& sudo mv -f /tmp/honeycomb /opt/honeycomb".format(
719 self.ssh.exec_command(cmd)
721 def set_cpu_scheduler(self, scheduler="FIFO"):
722 """Use alternate CPU scheduler.
724 Note: OTHER scheduler doesn't load-balance over isolcpus.
726 :param scheduler: CPU scheduler to use.
730 schedulers = {"FIFO": "-f 99", # First In, First Out
731 "RR": "-r 99", # Round Robin
732 "OTHER": "-o", # Ubuntu default
734 self.scheduler = "chrt {0}".format(schedulers[scheduler])
736 def set_cpu_core_affinity(self, low, high=None):
737 """Set core affinity for the honeycomb process and subprocesses.
739 :param low: Lowest core ID number.
740 :param high: Highest core ID number. Leave empty to use a single core.
745 self.core_affinity = "taskset -c {low}-{high}".format(
746 low=low, high=high if high else low)
748 def set_jit_compiler_mode(self, jit_mode):
749 """Set running mode for Java's JIT compiler.
751 :param jit_mode: Desiret JIT mode.
755 modes = {"client": "-client", # Default
756 "server": "-server", # Higher performance but longer warmup
757 "classic": "-classic" # Disables JIT compiler
760 self.jit_mode = modes[jit_mode]
762 def set_memory_size(self, mem_min, mem_max=None):
763 """Set minimum and maximum memory use for the JVM.
765 :param mem_min: Minimum amount of memory (MB).
766 :param mem_max: Maximum amount of memory (MB). Default is 4 times
772 self.params += " -Xms{min}m -Xmx{max}m".format(
773 min=mem_min, max=mem_max if mem_max else mem_min*4)
775 def set_metaspace_size(self, mem_min, mem_max=None):
776 """Set minimum and maximum memory used for class metadata in the JVM.
778 :param mem_min: Minimum metaspace size (MB).
779 :param mem_max: Maximum metaspace size (MB). Defailt is 4 times
785 self.params += " -XX:MetaspaceSize={min}m " \
786 "-XX:MaxMetaspaceSize={max}m".format(
787 min=mem_min, max=mem_max if mem_max else mem_min*4)
789 def set_numa_optimization(self):
790 """Use optimization of memory use and garbage collection for NUMA
793 self.params += " -XX:+UseNUMA -XX:+UseParallelGC"