Fix Tap failing tests
[csit.git] / resources / libraries / python / honeycomb / HoneycombSetup.py
1 # Copyright (c) 2018 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
5 #
6 #     http://www.apache.org/licenses/LICENSE-2.0
7 #
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
13
14 """Implementation of keywords for Honeycomb setup."""
15
16 from json import loads
17 from time import time, sleep
18
19 from ipaddress import IPv6Address, AddressValueError
20
21 from robot.api import logger
22
23 from resources.libraries.python.HTTPRequest import HTTPRequest, HTTPCodes, \
24     HTTPRequestError
25 from resources.libraries.python.Constants import Constants as Const
26 from resources.libraries.python.honeycomb.HoneycombUtil import HoneycombError
27 from resources.libraries.python.honeycomb.HoneycombUtil \
28     import HoneycombUtil as HcUtil
29 from resources.libraries.python.ssh import SSH
30 from resources.libraries.python.topology import NodeType
31
32
class HoneycombSetup(object):
    """Implements keywords for Honeycomb setup.

    The keywords implemented in this class make possible to:
    - start Honeycomb,
    - stop Honeycomb,
    - check the Honeycomb start-up state,
    - check the Honeycomb shutdown state,
    - add VPP to the topology.
    """

    def __init__(self):
        """No instance state; every keyword is a static method."""
        pass

47     @staticmethod
48     def start_honeycomb_on_duts(*nodes):
49         """Start Honeycomb on specified DUT nodes.
50
51         This keyword starts the Honeycomb service on specified DUTs.
52         The keyword just starts the Honeycomb and does not check its startup
53         state. Use the keyword "Check Honeycomb Startup State" to check if the
54         Honeycomb is up and running.
55         Honeycomb must be installed in "/opt" directory, otherwise the start
56         will fail.
57
58         :param nodes: List of nodes to start Honeycomb on.
59         :type nodes: list
60         :raises HoneycombError: If Honeycomb fails to start.
61         """
62
63         HoneycombSetup.print_environment(nodes)
64
65         cmd = "sudo service honeycomb start"
66
67         for node in nodes:
68             if node['type'] == NodeType.DUT:
69                 logger.console(
70                     "\n(re)Starting Honeycomb service on node {0}".format(
71                         node["host"]))
72                 ssh = SSH()
73                 ssh.connect(node)
74                 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
75                 if int(ret_code) != 0:
76                     raise HoneycombError('Node {0} failed to start Honeycomb.'.
77                                          format(node['host']))
78                 else:
79                     logger.info("Starting the Honeycomb service on node {0} is "
80                                 "in progress ...".format(node['host']))
81
82     @staticmethod
83     def stop_honeycomb_on_duts(*nodes):
84         """Stop the Honeycomb service on specified DUT nodes.
85
86         This keyword stops the Honeycomb service on specified nodes. It just
87         stops the Honeycomb and does not check its shutdown state. Use the
88         keyword "Check Honeycomb Shutdown State" to check if Honeycomb has
89         stopped.
90
91         :param nodes: List of nodes to stop Honeycomb on.
92         :type nodes: list
93         :raises HoneycombError: If Honeycomb failed to stop.
94         """
95
96         cmd = "sudo service honeycomb stop"
97         errors = []
98
99         for node in nodes:
100             if node['type'] == NodeType.DUT:
101                 logger.console(
102                     "\nShutting down Honeycomb service on node {0}".format(
103                         node["host"]))
104                 ssh = SSH()
105                 ssh.connect(node)
106                 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
107                 if int(ret_code) != 0:
108                     errors.append(node['host'])
109                 else:
110                     logger.info("Stopping the Honeycomb service on node {0} is "
111                                 "in progress ...".format(node['host']))
112         if errors:
113             raise HoneycombError('Node(s) {0} failed to stop Honeycomb.'.
114                                  format(errors))
115
116     @staticmethod
117     def restart_honeycomb_on_dut(node):
118         """Restart Honeycomb on specified DUT nodes.
119
120         This keyword restarts the Honeycomb service on specified DUTs. Use the
121         keyword "Check Honeycomb Startup State" to check if the Honeycomb is up
122         and running.
123
124         :param node: Node to restart Honeycomb on.
125         :type node: dict
126         :raises HoneycombError: If Honeycomb fails to start.
127         """
128
129         logger.console(
130             "\n(re)Starting Honeycomb service on node {0}".format(node["host"]))
131
132         cmd = "sudo service honeycomb restart"
133
134         ssh = SSH()
135         ssh.connect(node)
136         (ret_code, _, _) = ssh.exec_command_sudo(cmd)
137         if int(ret_code) != 0:
138             raise HoneycombError('Node {0} failed to restart Honeycomb.'.
139                                  format(node['host']))
140         else:
141             logger.info(
142                 "Honeycomb service restart is in progress on node {0}".format(
143                     node['host']))
144
145     @staticmethod
146     def check_honeycomb_startup_state(node, timeout=360, retries=20,
147                                       interval=15):
148         """Repeatedly check the status of Honeycomb startup until it is fully
149         started or until timeout or max retries is reached.
150
151         :param node: Honeycomb node.
152         :param timeout: Timeout value in seconds.
153         :param retries: Max number of retries.
154         :param interval: Interval between checks, in seconds.
155         :type node: dict
156         :type timeout: int
157         :type retries: int
158         :type interval: int
159         :raises HoneycombError: If the Honeycomb process IP cannot be found,
160             or if timeout or number of retries is exceeded.
161         """
162
163         ssh = SSH()
164         ssh.connect(node)
165
166         count = 0
167         start = time()
168         while time() - start < timeout and count < retries:
169             count += 1
170
171             try:
172                 status_code_version, _ = HcUtil.get_honeycomb_data(
173                     node, "oper_vpp_version")
174                 status_code_if_cfg, _ = HcUtil.get_honeycomb_data(
175                     node, "config_vpp_interfaces")
176                 status_code_if_oper, _ = HcUtil.get_honeycomb_data(
177                     node, "oper_vpp_interfaces")
178             except HTTPRequestError:
179                 sleep(interval)
180                 continue
181             if status_code_if_cfg == HTTPCodes.OK\
182                     and status_code_if_cfg == HTTPCodes.OK\
183                     and status_code_if_oper == HTTPCodes.OK:
184                 logger.info("Check successful, Honeycomb is up and running.")
185                 break
186             else:
187                 logger.debug(
188                     "Attempt ${count} failed on Restconf check. Status codes:\n"
189                     "Version: {version}\n"
190                     "Interface config: {if_cfg}\n"
191                     "Interface operational: {if_oper}".format(
192                         count=count,
193                         version=status_code_version,
194                         if_cfg=status_code_if_cfg,
195                         if_oper=status_code_if_oper))
196                 sleep(interval)
197                 continue
198         else:
199             _, vpp_status, _ = ssh.exec_command("sudo service vpp status")
200             raise HoneycombError(
201                 "Timeout or max retries exceeded. Status of VPP:\n"
202                 "{vpp_status}".format(vpp_status=vpp_status))
203
204     @staticmethod
205     def check_honeycomb_shutdown_state(node):
206         """Check state of Honeycomb service during shutdown on specified nodes.
207
208         Honeycomb nodes reply with connection refused or the following status
209         codes depending on shutdown progress: codes 200, 404.
210
211         :param node: List of DUT nodes stopping Honeycomb.
212         :type node: dict
213         :returns: True if all GETs fail to connect.
214         :rtype: bool
215         """
216         cmd = "pgrep honeycomb"
217
218         ssh = SSH()
219         ssh.connect(node)
220         (ret_code, _, _) = ssh.exec_command_sudo(cmd)
221         if ret_code == 0:
222             raise HoneycombError('Honeycomb on node {0} is still '
223                                  'running.'.format(node['host']),
224                                  enable_logging=False)
225         else:
226             logger.info("Honeycomb on node {0} has stopped".
227                         format(node['host']))
228         return True
229
230     @staticmethod
231     def configure_restconf_binding_address(node):
232         """Configure Honeycomb to accept restconf requests from all IP
233         addresses. IP version is determined by node data.
234
235         :param node: Information about a DUT node.
236         :type node: dict
237         :raises HoneycombError: If the configuration could not be changed.
238         """
239
240         find = "restconf-binding-address"
241         try:
242             IPv6Address(unicode(node["host"]))
243             # if management IP of the node is in IPv6 format
244             replace = '\\"restconf-binding-address\\": \\"0::0\\",'
245         except (AttributeError, AddressValueError):
246             replace = '\\"restconf-binding-address\\": \\"0.0.0.0\\",'
247
248         argument = '"/{0}/c\\ {1}"'.format(find, replace)
249         path = "{0}/config/restconf.json".format(Const.REMOTE_HC_DIR)
250         command = "sed -i {0} {1}".format(argument, path)
251
252         ssh = SSH()
253         ssh.connect(node)
254         (ret_code, _, stderr) = ssh.exec_command_sudo(command)
255         if ret_code != 0:
256             raise HoneycombError("Failed to modify configuration on "
257                                  "node {0}, {1}".format(node, stderr))
258
259     @staticmethod
260     def configure_jvpp_timeout(node, timeout=10):
261         """Configure timeout value for Java API commands Honeycomb sends to VPP.
262
263         :param node: Information about a DUT node.
264         :param timeout: Timeout value in seconds.
265         :type node: dict
266         :type timeout: int
267         :raises HoneycombError: If the configuration could not be changed.
268         """
269
270         find = "jvpp-request-timeout"
271         replace = '\\"jvpp-request-timeout\\": {0}'.format(timeout)
272
273         argument = '"/{0}/c\\ {1}"'.format(find, replace)
274         path = "{0}/config/jvpp.json".format(Const.REMOTE_HC_DIR)
275         command = "sed -i {0} {1}".format(argument, path)
276
277         ssh = SSH()
278         ssh.connect(node)
279         (ret_code, _, stderr) = ssh.exec_command_sudo(command)
280         if ret_code != 0:
281             raise HoneycombError("Failed to modify configuration on "
282                                  "node {0}, {1}".format(node, stderr))
283
284     @staticmethod
285     def print_environment(nodes):
286         """Print information about the nodes to log. The information is defined
287         by commands in cmds tuple at the beginning of this method.
288
289         :param nodes: List of DUT nodes to get information about.
290         :type nodes: list
291         """
292
293         # TODO: When everything is set and running in VIRL env, transform this
294         # method to a keyword checking the environment.
295
296         cmds = ("uname -a",
297                 "df -lh",
298                 "echo $JAVA_HOME",
299                 "echo $PATH",
300                 "which java",
301                 "java -version",
302                 "dpkg --list | grep openjdk",
303                 "ls -la /opt/honeycomb",
304                 "cat /opt/honeycomb/modules/*module-config")
305
306         for node in nodes:
307             if node['type'] == NodeType.DUT:
308                 logger.info("Checking node {} ...".format(node['host']))
309                 for cmd in cmds:
310                     logger.info("Command: {}".format(cmd))
311                     ssh = SSH()
312                     ssh.connect(node)
313                     ssh.exec_command_sudo(cmd)
314
315     @staticmethod
316     def print_ports(node):
317         """Uses "sudo netstat -anp | grep java" to print port where a java
318         application listens.
319
320         :param node: Honeycomb node where we want to print the ports.
321         :type node: dict
322         """
323
324         cmds = ("netstat -anp | grep java",
325                 "ps -ef | grep [h]oneycomb")
326
327         logger.info("Checking node {} ...".format(node['host']))
328         for cmd in cmds:
329             logger.info("Command: {}".format(cmd))
330             ssh = SSH()
331             ssh.connect(node)
332             ssh.exec_command_sudo(cmd)
333
334     @staticmethod
335     def configure_log_level(node, level):
336         """Set Honeycomb logging to the specified level.
337
338         :param node: Honeycomb node.
339         :param level: Log level (INFO, DEBUG, TRACE).
340         :type node: dict
341         :type level: str
342         """
343
344         find = 'logger name=\\"io.fd\\"'
345         replace = '<logger name=\\"io.fd\\" level=\\"{0}\\"/>'.format(level)
346
347         argument = '"/{0}/c\\ {1}"'.format(find, replace)
348         path = "{0}/config/logback.xml".format(Const.REMOTE_HC_DIR)
349         command = "sed -i {0} {1}".format(argument, path)
350
351         ssh = SSH()
352         ssh.connect(node)
353         (ret_code, _, stderr) = ssh.exec_command_sudo(command)
354         if ret_code != 0:
355             raise HoneycombError("Failed to modify configuration on "
356                                  "node {0}, {1}".format(node, stderr))
357
358     @staticmethod
359     def manage_honeycomb_features(node, feature, disable=False):
360         """Configure Honeycomb to use features that are disabled by default, or
361         disable previously enabled features.
362
363         ..Note:: If the module is not enabled in VPP, Honeycomb will
364         be unable to establish VPP connection.
365
366         :param node: Honeycomb node.
367         :param feature: Feature to enable.
368         :param disable: Disable the specified feature instead of enabling it.
369         :type node: dict
370         :type feature: string
371         :type disable: bool
372         :raises HoneycombError: If the configuration could not be changed.
373         """
374
375         disabled_features = {
376             "NSH": ["io.fd.hc2vpp.vppnsh.impl.VppNshModule"],
377             "BGP": ["io.fd.hc2vpp.bgp.inet.BgpInetModule",
378                     "io.fd.honeycomb.infra.bgp.BgpModule",
379                     "io.fd.honeycomb.infra.bgp.BgpReadersModule",
380                     "io.fd.honeycomb.infra.bgp.BgpWritersModule",
381                     "io.fd.honeycomb.northbound.bgp.extension.InetModule",
382                     "io.fd.honeycomb.northbound.bgp.extension.EvpnModule",
383                     "io.fd.honeycomb.northbound.bgp.extension.L3VpnV4Module",
384                     "io.fd.honeycomb.northbound.bgp.extension.L3VpnV6Module",
385                     "io.fd.honeycomb.northbound.bgp.extension."
386                     "LabeledUnicastModule",
387                     "io.fd.honeycomb.northbound.bgp.extension.LinkstateModule"]
388         }
389
390         ssh = SSH()
391         ssh.connect(node)
392
393         if feature in disabled_features.keys():
394             # for every module, uncomment by replacing the entire line
395             for item in disabled_features[feature]:
396                 find = replace = "{0}".format(item)
397                 if disable:
398                     replace = "// {0}".format(find)
399
400                 argument = '"/{0}/c\\ {1}"'.format(find, replace)
401                 path = "{0}/modules/*module-config"\
402                     .format(Const.REMOTE_HC_DIR)
403                 command = "sed -i {0} {1}".format(argument, path)
404
405                 (ret_code, _, stderr) = ssh.exec_command_sudo(command)
406                 if ret_code != 0:
407                     raise HoneycombError("Failed to modify configuration on "
408                                          "node {0}, {1}".format(node, stderr))
409         else:
410             raise HoneycombError(
411                 "Unrecognized feature {0}.".format(feature))
412
413     @staticmethod
414     def copy_java_libraries(node):
415         """Copy Java libraries installed by vpp-api-java package to honeycomb
416         lib folder.
417
418         This is a (temporary?) workaround for jvpp version mismatches.
419
420         :param node: Honeycomb node
421         :type node: dict
422         """
423
424         ssh = SSH()
425         ssh.connect(node)
426         (_, stdout, _) = ssh.exec_command_sudo(
427             "ls /usr/share/java | grep ^jvpp-*")
428
429         files = stdout.split("\n")[:-1]
430         for item in files:
431             # example filenames:
432             # jvpp-registry-17.04.jar
433             # jvpp-core-17.04.jar
434
435             parts = item.split("-")
436             version = "{0}-SNAPSHOT".format(parts[2][:5])
437             artifact_id = "{0}-{1}".format(parts[0], parts[1])
438
439             directory = "{0}/lib/io/fd/vpp/{1}/{2}".format(
440                 Const.REMOTE_HC_DIR, artifact_id, version)
441             cmd = "sudo mkdir -p {0}; " \
442                   "sudo cp /usr/share/java/{1} {0}/{2}-{3}.jar".format(
443                       directory, item, artifact_id, version)
444
445             (ret_code, _, stderr) = ssh.exec_command(cmd)
446             if ret_code != 0:
447                 raise HoneycombError("Failed to copy JVPP libraries on "
448                                      "node {0}, {1}".format(node, stderr))
449
450     @staticmethod
451     def copy_odl_client(node, odl_name, src_path, dst_path):
452         """Copy ODL Client from source path to destination path.
453
454         :param node: Honeycomb node.
455         :param odl_name: Name of ODL client version to use.
456         :param src_path: Source Path where to find ODl client.
457         :param dst_path: Destination path.
458         :type node: dict
459         :type odl_name: str
460         :type src_path: str
461         :type dst_path: str
462         :raises HoneycombError: If the operation fails.
463         """
464
465         ssh = SSH()
466         ssh.connect(node)
467
468         cmd = "sudo rm -rf {dst}/*karaf_{odl_name} && " \
469               "cp -r {src}/*karaf_{odl_name}* {dst}".format(
470                   src=src_path, odl_name=odl_name, dst=dst_path)
471
472         ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=180)
473         if int(ret_code) != 0:
474             raise HoneycombError(
475                 "Failed to copy ODL client on node {0}".format(node["host"]))
476
477     @staticmethod
478     def setup_odl_client(node, path):
479         """Start ODL client on the specified node.
480
481         Karaf should be located in the provided path, and VPP and Honeycomb
482         should already be running, otherwise the start will fail.
483
484         :param node: Node to start ODL client on.
485         :param path: Path to ODL client on node.
486         :type node: dict
487         :type path: str
488         :raises HoneycombError: If Honeycomb fails to start.
489         """
490
491         logger.console("\nStarting ODL client ...")
492         ssh = SSH()
493         ssh.connect(node)
494
495         cmd = "{path}/*karaf*/bin/start clean".format(path=path)
496         ret_code, _, _ = ssh.exec_command_sudo(cmd)
497
498         if int(ret_code) != 0:
499             raise HoneycombError('Node {0} failed to start ODL.'.
500                                  format(node['host']))
501         else:
502             logger.info("Starting the ODL client on node {0} is "
503                         "in progress ...".format(node['host']))
504
505     @staticmethod
506     def install_odl_features(node, odl_name, path, *features):
507         """Install required features on a running ODL client.
508
509         :param node: Honeycomb node.
510         :param odl_name: Name of ODL client version to use.
511         :param path: Path to ODL client on node.
512         :param features: Optional, list of additional features to install.
513         :type node: dict
514         :type odl_name: str
515         :type path: str
516         :type features: list
517         """
518
519         ssh = SSH()
520         ssh.connect(node)
521
522         auth = "-u karaf"
523         if odl_name.lower() == "oxygen" or odl_name.lower() == "fluorine":
524             auth = "-u karaf -p karaf"
525
526         cmd = "{path}/*karaf*/bin/client {auth} feature:install " \
527               "odl-restconf-all " \
528               "odl-netconf-connector-all " \
529               "odl-netconf-topology".format(path=path, auth=auth)
530         for feature in features:
531             cmd += " {0}".format(feature)
532
533         ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=250)
534
535         if int(ret_code) != 0:
536             raise HoneycombError("Feature install did not succeed.")
537
538     @staticmethod
539     def check_odl_startup_state(node):
540         """Check the status of ODL client startup.
541
542         :param node: Honeycomb node.
543         :param node: dict
544         :returns: True when ODL is started.
545         :rtype: bool
546         :raises HoneycombError: When the response is not code 200: OK.
547         """
548
549         path = HcUtil.read_path_from_url_file(
550             "odl_client/odl_netconf_connector")
551         expected_status_codes = (HTTPCodes.UNAUTHORIZED,
552                                  HTTPCodes.FORBIDDEN,
553                                  HTTPCodes.NOT_FOUND,
554                                  HTTPCodes.SERVICE_UNAVAILABLE,
555                                  HTTPCodes.INTERNAL_SERVER_ERROR)
556
557         status_code, _ = HTTPRequest.get(node, path, timeout=10,
558                                          enable_logging=False)
559         if status_code == HTTPCodes.OK:
560             logger.info("ODL client on node {0} is up and running".
561                         format(node['host']))
562         elif status_code in expected_status_codes:
563             if status_code == HTTPCodes.UNAUTHORIZED:
564                 logger.info('Unauthorized. If this triggers keyword '
565                             'timeout, verify username and password.')
566             raise HoneycombError('ODL client on node {0} running but '
567                                  'not yet ready.'.format(node['host']),
568                                  enable_logging=False)
569         else:
570             raise HoneycombError('Unexpected return code: {0}.'.
571                                  format(status_code))
572         return True
573
574     @staticmethod
575     def check_odl_shutdown_state(node):
576         """Check the status of ODL client shutdown.
577
578         :param node: Honeycomb node.
579         :type node: dict
580         :returns: True when ODL is stopped.
581         :rtype: bool
582         :raises HoneycombError: When the response is not code 200: OK.
583         """
584
585         cmd = "pgrep -f karaf"
586         path = HcUtil.read_path_from_url_file(
587             "odl_client/odl_netconf_connector")
588
589         try:
590             HTTPRequest.get(node, path, timeout=10, enable_logging=False)
591             raise HoneycombError("ODL client is still running.")
592         except HTTPRequestError:
593             logger.debug("Connection refused, checking process state....")
594             ssh = SSH()
595             ssh.connect(node)
596             ret_code, _, _ = ssh.exec_command(cmd)
597             if ret_code == 0:
598                 raise HoneycombError("ODL client is still running.")
599
600         return True
601
602     @staticmethod
603     def mount_honeycomb_on_odl(node):
604         """Tell ODL client to mount Honeycomb instance over netconf.
605
606         :param node: Honeycomb node.
607         :type node: dict
608         :raises HoneycombError: When the response is not code 200: OK.
609         """
610
611         path = HcUtil.read_path_from_url_file(
612             "odl_client/odl_netconf_connector")
613
614         url_file = "{0}/{1}".format(Const.RESOURCES_TPL_HC,
615                                     "odl_client/mount_honeycomb.json")
616
617         with open(url_file) as template:
618             data = template.read()
619
620         data = loads(data)
621
622         status_code, _ = HTTPRequest.post(
623             node,
624             path,
625             headers={"Content-Type": "application/json",
626                      "Accept": "text/plain"},
627             json=data,
628             timeout=10,
629             enable_logging=False)
630
631         if status_code == HTTPCodes.OK:
632             logger.info("ODL mount point configured successfully.")
633         elif status_code == HTTPCodes.CONFLICT:
634             logger.info("ODL mount point was already configured.")
635         else:
636             raise HoneycombError('Mount point configuration not successful')
637
638     @staticmethod
639     def stop_odl_client(node, path):
640         """Stop ODL client service on the specified node.
641
642         :param node: Node to start ODL client on.
643         :param path: Path to ODL client.
644         :type node: dict
645         :type path: str
646         :raises HoneycombError: If ODL client fails to stop.
647         """
648
649         ssh = SSH()
650         ssh.connect(node)
651
652         cmd = "{0}/*karaf*/bin/stop".format(path)
653
654         ssh = SSH()
655         ssh.connect(node)
656         ret_code, _, _ = ssh.exec_command_sudo(cmd)
657         if int(ret_code) != 0:
658             logger.debug("ODL Client refused to shut down.")
659             cmd = "pkill -f 'karaf'"
660             (ret_code, _, _) = ssh.exec_command_sudo(cmd)
661             if int(ret_code) != 0:
662                 raise HoneycombError('Node {0} failed to stop ODL.'.
663                                      format(node['host']))
664
665         logger.info("ODL client service stopped.")
666
667     @staticmethod
668     def set_static_arp(node, ip_address, mac_address):
669         """Configure a static ARP entry using arp.
670
671         :param node: Node in topology.
672         :param ip_address: IP address for the entry.
673         :param mac_address: MAC adddress for the entry.
674         :type node: dict
675         :type ip_address: str
676         :type mac_address: str
677         :raises RuntimeError: If the operation fails.
678         """
679
680         ssh = SSH()
681         ssh.connect(node)
682         ret_code, _, _ = ssh.exec_command_sudo("arp -s {0} {1}".format(
683             ip_address, mac_address))
684
685         if ret_code != 0:
686             raise RuntimeError("Failed to configure static ARP adddress.")
687
688
class HoneycombStartupConfig(object):
    """Generator for Honeycomb startup configuration.
    """
    def __init__(self):
        """Initializer."""

        # Shell wrapper template; Honeycomb signals "restart me" by exiting
        # with status 100.
        self.template = """#!/bin/sh -
        STATUS=100

        while [ $STATUS -eq 100 ]
        do
          {java_call} -jar $(dirname $0)/{jar_filename}
          STATUS=$?
          echo "Honeycomb exited with status: $STATUS"
          if [ $STATUS -eq 100 ]
          then
            echo "Restarting..."
          fi
        done
        """

        # Skeleton of the java invocation line assembled from the fragments
        # below by apply_config().
        self.java_call = "{scheduler} {affinity} java{jit_mode}{params}"

        # Individual tuning fragments, filled in by the set_* methods.
        self.scheduler = ""
        self.core_affinity = ""
        self.jit_mode = ""
        self.params = ""
        self.numa = ""

        # Rendered startup script and the SSH session used to deploy it.
        self.config = ""
        self.ssh = SSH()

721     def apply_config(self, node):
722         """Generate configuration file /opt/honeycomb/honeycomb on the specified
723         node.
724
725         :param node: Honeycomb node.
726         :type node: dict
727         """
728
729         self.ssh.connect(node)
730         _, filename, _ = self.ssh.exec_command("ls /opt/honeycomb | grep .jar")
731
732         java_call = self.java_call.format(scheduler=self.scheduler,
733                                           affinity=self.core_affinity,
734                                           jit_mode=self.jit_mode,
735                                           params=self.params)
736         self.config = self.template.format(java_call=java_call,
737                                            jar_filename=filename)
738
739         self.ssh.connect(node)
740         cmd = "echo '{config}' > /tmp/honeycomb " \
741               "&& chmod +x /tmp/honeycomb " \
742               "&& sudo mv -f /tmp/honeycomb /opt/honeycomb".\
743             format(config=self.config)
744         self.ssh.exec_command(cmd)
745
746     def set_cpu_scheduler(self, scheduler="FIFO"):
747         """Use alternate CPU scheduler.
748
749         Note: OTHER scheduler doesn't load-balance over isolcpus.
750
751         :param scheduler: CPU scheduler to use.
752         :type scheduler: str
753         """
754
755         schedulers = {"FIFO": "-f 99",  # First In, First Out
756                       "RR": "-r 99",  # Round Robin
757                       "OTHER": "-o",  # Ubuntu default
758                      }
759         self.scheduler = "chrt {0}".format(schedulers[scheduler])
760
761     def set_cpu_core_affinity(self, low, high=None):
762         """Set core affinity for the honeycomb process and subprocesses.
763
764         :param low: Lowest core ID number.
765         :param high: Highest core ID number. Leave empty to use a single core.
766         :type low: int
767         :type high: int
768         """
769
770         self.core_affinity = "taskset -c {low}-{high}".format(
771             low=low, high=high if high else low)
772
773     def set_jit_compiler_mode(self, jit_mode):
774         """Set running mode for Java's JIT compiler.
775
776         :param jit_mode: Desiret JIT mode.
777         :type jit_mode: str
778         """
779
780         modes = {"client": " -client",  # Default
781                  "server": " -server",  # Higher performance but longer warmup
782                  "classic": " -classic"  # Disables JIT compiler
783                 }
784
785         self.jit_mode = modes[jit_mode]
786
787     def set_memory_size(self, mem_min, mem_max=None):
788         """Set minimum and maximum memory use for the JVM.
789
790         :param mem_min: Minimum amount of memory (MB).
791         :param mem_max: Maximum amount of memory (MB). Default is 4 times
792             minimum value.
793         :type mem_min: int
794         :type mem_max: int
795         """
796
797         self.params += " -Xms{min}m -Xmx{max}m".format(
798             min=mem_min, max=mem_max if mem_max else mem_min*4)
799
800     def set_metaspace_size(self, mem_min, mem_max=None):
801         """Set minimum and maximum memory used for class metadata in the JVM.
802
803         :param mem_min: Minimum metaspace size (MB).
804         :param mem_max: Maximum metaspace size (MB). Defailt is 4 times
805             minimum value.
806         :type mem_min: int
807         :type mem_max: int
808         """
809
810         self.params += " -XX:MetaspaceSize={min}m " \
811                        "-XX:MaxMetaspaceSize={max}m".format(
812                            min=mem_min, max=mem_max if mem_max else mem_min*4)
813
814     def set_numa_optimization(self):
815         """Use optimization of memory use and garbage collection for NUMA
816         architectures."""
817
818         self.params += " -XX:+UseNUMA -XX:+UseParallelGC"
819
820     def set_ssh_security_provider(self):
821         """Disables BouncyCastle for SSHD."""
822         # Workaround for issue described in:
823         # https://wiki.fd.io/view/Honeycomb/Releases/1609/Honeycomb_and_ODL
824
825         self.params += " -Dorg.apache.sshd.registerBouncyCastle=false"