d4175b13e420a81a8729598f296acd86752b05d3
[csit.git] / resources / libraries / python / honeycomb / HoneycombSetup.py
1 # Copyright (c) 2017 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
5 #
6 #     http://www.apache.org/licenses/LICENSE-2.0
7 #
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
13
14 """Implementation of keywords for Honeycomb setup."""
15
16 from json import loads
17 from time import time, sleep
18
19 from ipaddress import IPv6Address, AddressValueError
20
21 from robot.api import logger
22
23 from resources.libraries.python.HTTPRequest import HTTPRequest, HTTPCodes, \
24     HTTPRequestError
25 from resources.libraries.python.constants import Constants as Const
26 from resources.libraries.python.honeycomb.HoneycombUtil import HoneycombError
27 from resources.libraries.python.honeycomb.HoneycombUtil \
28     import HoneycombUtil as HcUtil
29 from resources.libraries.python.ssh import SSH
30 from resources.libraries.python.topology import NodeType
31
32
class HoneycombSetup(object):
    """Implements keywords for Honeycomb setup.

    The keywords implemented in this class make possible to:
    - start Honeycomb,
    - stop Honeycomb,
    - check the Honeycomb start-up state,
    - check the Honeycomb shutdown state,
    - add VPP to the topology.
    """

    def __init__(self):
        pass

    @staticmethod
    def start_honeycomb_on_duts(*nodes):
        """Start Honeycomb on specified DUT nodes.

        This keyword starts the Honeycomb service on specified DUTs.
        The keyword just starts the Honeycomb and does not check its startup
        state. Use the keyword "Check Honeycomb Startup State" to check if the
        Honeycomb is up and running.
        Honeycomb must be installed in "/opt" directory, otherwise the start
        will fail.

        :param nodes: List of nodes to start Honeycomb on.
        :type nodes: list
        :raises HoneycombError: If Honeycomb fails to start.
        """

        HoneycombSetup.print_environment(nodes)

        cmd = "sudo service honeycomb start"

        for node in nodes:
            if node['type'] == NodeType.DUT:
                logger.console(
                    "\n(re)Starting Honeycomb service on node {0}".format(
                        node["host"]))
                ssh = SSH()
                ssh.connect(node)
                (ret_code, _, _) = ssh.exec_command_sudo(cmd)
                if int(ret_code) != 0:
                    raise HoneycombError('Node {0} failed to start Honeycomb.'.
                                         format(node['host']))
                else:
                    logger.info("Starting the Honeycomb service on node {0} is "
                                "in progress ...".format(node['host']))

    @staticmethod
    def stop_honeycomb_on_duts(*nodes):
        """Stop the Honeycomb service on specified DUT nodes.

        This keyword stops the Honeycomb service on specified nodes. It just
        stops the Honeycomb and does not check its shutdown state. Use the
        keyword "Check Honeycomb Shutdown State" to check if Honeycomb has
        stopped.

        :param nodes: List of nodes to stop Honeycomb on.
        :type nodes: list
        :raises HoneycombError: If Honeycomb failed to stop.
        """

        cmd = "sudo service honeycomb stop"
        # Collect failing hosts so every node gets a stop attempt before
        # the error is raised.
        errors = []

        for node in nodes:
            if node['type'] == NodeType.DUT:
                logger.console(
                    "\nShutting down Honeycomb service on node {0}".format(
                        node["host"]))
                ssh = SSH()
                ssh.connect(node)
                (ret_code, _, _) = ssh.exec_command_sudo(cmd)
                if int(ret_code) != 0:
                    errors.append(node['host'])
                else:
                    logger.info("Stopping the Honeycomb service on node {0} is "
                                "in progress ...".format(node['host']))
        if errors:
            raise HoneycombError('Node(s) {0} failed to stop Honeycomb.'.
                                 format(errors))

    @staticmethod
    def restart_honeycomb_on_dut(node):
        """Restart Honeycomb on specified DUT nodes.

        This keyword restarts the Honeycomb service on specified DUTs. Use the
        keyword "Check Honeycomb Startup State" to check if the Honeycomb is up
        and running.

        :param node: Node to restart Honeycomb on.
        :type node: dict
        :raises HoneycombError: If Honeycomb fails to start.
        """

        logger.console(
            "\n(re)Starting Honeycomb service on node {0}".format(node["host"]))

        cmd = "sudo service honeycomb restart"

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, _) = ssh.exec_command_sudo(cmd)
        if int(ret_code) != 0:
            raise HoneycombError('Node {0} failed to restart Honeycomb.'.
                                 format(node['host']))
        else:
            logger.info(
                "Honeycomb service restart is in progress on node {0}".format(
                    node['host']))

    @staticmethod
    def check_honeycomb_startup_state(node, timeout=360, retries=20,
                                      interval=15):
        """Repeatedly check the status of Honeycomb startup until it is fully
        started or until timeout or max retries is reached.

        :param node: Honeycomb node.
        :param timeout: Timeout value in seconds.
        :param retries: Max number of retries.
        :param interval: Interval between checks, in seconds.
        :type node: dict
        :type timeout: int
        :type retries: int
        :type interval: int
        :raises HoneycombError: If timeout or number of retries is exceeded.
        """

        ssh = SSH()
        ssh.connect(node)

        count = 0
        start = time()
        while time() - start < timeout and count < retries:
            count += 1

            try:
                status_code_version, _ = HcUtil.get_honeycomb_data(
                    node, "oper_vpp_version")
                status_code_if_cfg, _ = HcUtil.get_honeycomb_data(
                    node, "config_vpp_interfaces")
                status_code_if_oper, _ = HcUtil.get_honeycomb_data(
                    node, "oper_vpp_interfaces")
            except HTTPRequestError:
                # Honeycomb not listening yet, wait and retry.
                sleep(interval)
                continue
            # All three Restconf endpoints must respond with 200 OK.
            # Note: the original checked status_code_if_cfg twice and
            # never verified the version endpoint.
            if status_code_version == HTTPCodes.OK\
                    and status_code_if_cfg == HTTPCodes.OK\
                    and status_code_if_oper == HTTPCodes.OK:
                logger.info("Check successful, Honeycomb is up and running.")
                break
            else:
                # Fixed stray Robot-style "$" in the format placeholder.
                logger.debug(
                    "Attempt {count} failed on Restconf check. Status codes:\n"
                    "Version: {version}\n"
                    "Interface config: {if_cfg}\n"
                    "Interface operational: {if_oper}".format(
                        count=count,
                        version=status_code_version,
                        if_cfg=status_code_if_cfg,
                        if_oper=status_code_if_oper))
                sleep(interval)
                continue
        else:
            # Loop exhausted without break: gather VPP status for diagnostics.
            _, vpp_status, _ = ssh.exec_command("sudo service vpp status")
            raise HoneycombError(
                "Timeout or max retries exceeded. Status of VPP:\n"
                "{vpp_status}".format(vpp_status=vpp_status))

    @staticmethod
    def check_honeycomb_shutdown_state(node):
        """Check state of Honeycomb service during shutdown on specified nodes.

        The check is process-based: it passes only when no honeycomb
        process is found on the node.

        :param node: DUT node stopping Honeycomb.
        :type node: dict
        :returns: True if the Honeycomb process is no longer running.
        :rtype: bool
        :raises HoneycombError: If the Honeycomb process is still running.
        """
        cmd = "pgrep honeycomb"

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, _) = ssh.exec_command_sudo(cmd)
        # pgrep returns 0 when at least one matching process exists.
        if ret_code == 0:
            raise HoneycombError('Honeycomb on node {0} is still '
                                 'running.'.format(node['host']),
                                 enable_logging=False)
        else:
            logger.info("Honeycomb on node {0} has stopped".
                        format(node['host']))
        return True

    @staticmethod
    def configure_restconf_binding_address(node):
        """Configure Honeycomb to accept restconf requests from all IP
        addresses. IP version is determined by node data.

        :param node: Information about a DUT node.
        :type node: dict
        :raises HoneycombError: If the configuration could not be changed.
        """

        find = "restconf-binding-address"
        try:
            IPv6Address(unicode(node["host"]))
            # if management IP of the node is in IPv6 format
            replace = '\\"restconf-binding-address\\": \\"0::0\\",'
        except (AttributeError, AddressValueError):
            replace = '\\"restconf-binding-address\\": \\"0.0.0.0\\",'

        # sed "/<find>/c\ <replace>" rewrites the matching line in place.
        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/restconf.json".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        if ret_code != 0:
            raise HoneycombError("Failed to modify configuration on "
                                 "node {0}, {1}".format(node, stderr))

    @staticmethod
    def configure_jvpp_timeout(node, timeout=10):
        """Configure timeout value for Java API commands Honeycomb sends to VPP.

        :param node: Information about a DUT node.
        :param timeout: Timeout value in seconds.
        :type node: dict
        :type timeout: int
        :raises HoneycombError: If the configuration could not be changed.
        """

        find = "jvpp-request-timeout"
        replace = '\\"jvpp-request-timeout\\": {0}'.format(timeout)

        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/jvpp.json".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        if ret_code != 0:
            raise HoneycombError("Failed to modify configuration on "
                                 "node {0}, {1}".format(node, stderr))

    @staticmethod
    def print_environment(nodes):
        """Print information about the nodes to log. The information is defined
        by commands in cmds tuple at the beginning of this method.

        :param nodes: List of DUT nodes to get information about.
        :type nodes: list
        """

        # TODO: When everything is set and running in VIRL env, transform this
        # method to a keyword checking the environment.

        cmds = ("uname -a",
                "df -lh",
                "echo $JAVA_HOME",
                "echo $PATH",
                "which java",
                "java -version",
                "dpkg --list | grep openjdk",
                "ls -la /opt/honeycomb",
                "cat /opt/honeycomb/modules/*module-config")

        for node in nodes:
            if node['type'] == NodeType.DUT:
                logger.info("Checking node {} ...".format(node['host']))
                # One SSH connection per node; reuse it for every command.
                ssh = SSH()
                ssh.connect(node)
                for cmd in cmds:
                    logger.info("Command: {}".format(cmd))
                    ssh.exec_command_sudo(cmd)

    @staticmethod
    def print_ports(node):
        """Uses "sudo netstat -anp | grep java" to print port where a java
        application listens.

        :param node: Honeycomb node where we want to print the ports.
        :type node: dict
        """

        cmds = ("netstat -anp | grep java",
                "ps -ef | grep [h]oneycomb")

        logger.info("Checking node {} ...".format(node['host']))
        # One SSH connection, reused for every command.
        ssh = SSH()
        ssh.connect(node)
        for cmd in cmds:
            logger.info("Command: {}".format(cmd))
            ssh.exec_command_sudo(cmd)

    @staticmethod
    def configure_log_level(node, level):
        """Set Honeycomb logging to the specified level.

        :param node: Honeycomb node.
        :param level: Log level (INFO, DEBUG, TRACE).
        :type node: dict
        :type level: str
        :raises HoneycombError: If the configuration could not be changed.
        """

        find = 'logger name=\\"io.fd\\"'
        replace = '<logger name=\\"io.fd\\" level=\\"{0}\\"/>'.format(level)

        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/logback.xml".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        if ret_code != 0:
            raise HoneycombError("Failed to modify configuration on "
                                 "node {0}, {1}".format(node, stderr))

    @staticmethod
    def manage_honeycomb_features(node, feature, disable=False):
        """Configure Honeycomb to use features that are disabled by default, or
        disable previously enabled features.

        ..Note:: If the module is not enabled in VPP, Honeycomb will
        be unable to establish VPP connection.

        :param node: Honeycomb node.
        :param feature: Feature to enable.
        :param disable: Disable the specified feature instead of enabling it.
        :type node: dict
        :type feature: string
        :type disable: bool
        :raises HoneycombError: If the configuration could not be changed.
        """

        disabled_features = {
            "NSH": ["io.fd.hc2vpp.vppnsh.impl.VppNshModule"],
            "BGP": ["io.fd.hc2vpp.bgp.inet.BgpInetModule",
                    "io.fd.honeycomb.infra.bgp.BgpModule",
                    "io.fd.honeycomb.infra.bgp.BgpReadersModule",
                    "io.fd.honeycomb.infra.bgp.BgpWritersModule",
                    "io.fd.honeycomb.northbound.bgp.extension.InetModule",
                    "io.fd.honeycomb.northbound.bgp.extension.EvpnModule",
                    "io.fd.honeycomb.northbound.bgp.extension.L3VpnV4Module",
                    "io.fd.honeycomb.northbound.bgp.extension.L3VpnV6Module",
                    "io.fd.honeycomb.northbound.bgp.extension."
                    "LabeledUnicastModule",
                    "io.fd.honeycomb.northbound.bgp.extension.LinkstateModule"]
        }

        ssh = SSH()
        ssh.connect(node)

        if feature in disabled_features:
            # for every module, uncomment by replacing the entire line
            for item in disabled_features[feature]:
                find = replace = "{0}".format(item)
                if disable:
                    # Prefix with "//" to comment the module out instead.
                    replace = "// {0}".format(find)

                argument = '"/{0}/c\\ {1}"'.format(find, replace)
                path = "{0}/modules/*module-config"\
                    .format(Const.REMOTE_HC_DIR)
                command = "sed -i {0} {1}".format(argument, path)

                (ret_code, _, stderr) = ssh.exec_command_sudo(command)
                if ret_code != 0:
                    raise HoneycombError("Failed to modify configuration on "
                                         "node {0}, {1}".format(node, stderr))
        else:
            raise HoneycombError(
                "Unrecognized feature {0}.".format(feature))

    @staticmethod
    def copy_java_libraries(node):
        """Copy Java libraries installed by vpp-api-java package to honeycomb
        lib folder.

        This is a (temporary?) workaround for jvpp version mismatches.

        :param node: Honeycomb node.
        :type node: dict
        :raises HoneycombError: If the operation fails.
        """

        ssh = SSH()
        ssh.connect(node)
        (_, stdout, _) = ssh.exec_command_sudo(
            "ls /usr/share/java | grep ^jvpp-*")

        files = stdout.split("\n")[:-1]
        for item in files:
            # example filenames:
            # jvpp-registry-17.04.jar
            # jvpp-core-17.04.jar

            parts = item.split("-")
            version = "{0}-SNAPSHOT".format(parts[2][:5])
            artifact_id = "{0}-{1}".format(parts[0], parts[1])

            directory = "{0}/lib/io/fd/vpp/{1}/{2}".format(
                Const.REMOTE_HC_DIR, artifact_id, version)
            cmd = "sudo mkdir -p {0}; " \
                  "sudo cp /usr/share/java/{1} {0}/{2}-{3}.jar".format(
                      directory, item, artifact_id, version)

            (ret_code, _, stderr) = ssh.exec_command(cmd)
            if ret_code != 0:
                raise HoneycombError("Failed to copy JVPP libraries on "
                                     "node {0}, {1}".format(node, stderr))

    @staticmethod
    def copy_odl_client(node, odl_name, src_path, dst_path):
        """Copy ODL Client from source path to destination path.

        :param node: Honeycomb node.
        :param odl_name: Name of ODL client version to use.
        :param src_path: Source Path where to find ODl client.
        :param dst_path: Destination path.
        :type node: dict
        :type odl_name: str
        :type src_path: str
        :type dst_path: str
        :raises HoneycombError: If the operation fails.
        """

        ssh = SSH()
        ssh.connect(node)

        cmd = "sudo rm -rf {dst}/*karaf_{odl_name} && " \
              "cp -r {src}/*karaf_{odl_name}* {dst}".format(
                  src=src_path, odl_name=odl_name, dst=dst_path)

        ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=180)
        if int(ret_code) != 0:
            raise HoneycombError(
                "Failed to copy ODL client on node {0}".format(node["host"]))

    @staticmethod
    def setup_odl_client(node, path):
        """Start ODL client on the specified node.

        Karaf should be located in the provided path, and VPP and Honeycomb
        should already be running, otherwise the start will fail.

        :param node: Node to start ODL client on.
        :param path: Path to ODL client on node.
        :type node: dict
        :type path: str
        :raises HoneycombError: If Honeycomb fails to start.
        """

        logger.console("\nStarting ODL client ...")
        ssh = SSH()
        ssh.connect(node)

        cmd = "{path}/*karaf*/bin/start clean".format(path=path)
        ret_code, _, _ = ssh.exec_command_sudo(cmd)

        if int(ret_code) != 0:
            raise HoneycombError('Node {0} failed to start ODL.'.
                                 format(node['host']))
        else:
            logger.info("Starting the ODL client on node {0} is "
                        "in progress ...".format(node['host']))

    @staticmethod
    def install_odl_features(node, path, *features):
        """Install required features on a running ODL client.

        :param node: Honeycomb node.
        :param path: Path to ODL client on node.
        :param features: Optional, list of additional features to install.
        :type node: dict
        :type path: str
        :type features: list
        :raises HoneycombError: If the feature installation fails.
        """

        ssh = SSH()
        ssh.connect(node)

        cmd = "{path}/*karaf*/bin/client -u karaf feature:install " \
              "odl-restconf-all " \
              "odl-netconf-connector-all " \
              "odl-netconf-topology".format(path=path)
        for feature in features:
            cmd += " {0}".format(feature)

        ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=250)

        if int(ret_code) != 0:
            raise HoneycombError("Feature install did not succeed.")

    @staticmethod
    def check_odl_startup_state(node):
        """Check the status of ODL client startup.

        :param node: Honeycomb node.
        :type node: dict
        :returns: True when ODL is started.
        :rtype: bool
        :raises HoneycombError: When the response is not code 200: OK.
        """

        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")
        # These codes mean ODL is running but not yet serving the connector.
        expected_status_codes = (HTTPCodes.UNAUTHORIZED,
                                 HTTPCodes.FORBIDDEN,
                                 HTTPCodes.NOT_FOUND,
                                 HTTPCodes.SERVICE_UNAVAILABLE,
                                 HTTPCodes.INTERNAL_SERVER_ERROR)

        status_code, _ = HTTPRequest.get(node, path, timeout=10,
                                         enable_logging=False)
        if status_code == HTTPCodes.OK:
            logger.info("ODL client on node {0} is up and running".
                        format(node['host']))
        elif status_code in expected_status_codes:
            if status_code == HTTPCodes.UNAUTHORIZED:
                logger.info('Unauthorized. If this triggers keyword '
                            'timeout, verify username and password.')
            raise HoneycombError('ODL client on node {0} running but '
                                 'not yet ready.'.format(node['host']),
                                 enable_logging=False)
        else:
            raise HoneycombError('Unexpected return code: {0}.'.
                                 format(status_code))
        return True

    @staticmethod
    def check_odl_shutdown_state(node):
        """Check the status of ODL client shutdown.

        :param node: Honeycomb node.
        :type node: dict
        :returns: True when ODL is stopped.
        :rtype: bool
        :raises HoneycombError: When the ODL client is still running.
        """

        cmd = "pgrep -f karaf"
        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")

        try:
            HTTPRequest.get(node, path, timeout=10, enable_logging=False)
            raise HoneycombError("ODL client is still running.")
        except HTTPRequestError:
            logger.debug("Connection refused, checking process state....")
            ssh = SSH()
            ssh.connect(node)
            ret_code, _, _ = ssh.exec_command(cmd)
            # pgrep returns 0 when a matching process still exists.
            if ret_code == 0:
                raise HoneycombError("ODL client is still running.")

        return True

    @staticmethod
    def mount_honeycomb_on_odl(node):
        """Tell ODL client to mount Honeycomb instance over netconf.

        :param node: Honeycomb node.
        :type node: dict
        :raises HoneycombError: When the response is not code 200: OK.
        """

        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")

        url_file = "{0}/{1}".format(Const.RESOURCES_TPL_HC,
                                    "odl_client/mount_honeycomb.json")

        with open(url_file) as template:
            data = template.read()

        data = loads(data)

        status_code, _ = HTTPRequest.post(
            node,
            path,
            headers={"Content-Type": "application/json",
                     "Accept": "text/plain"},
            json=data,
            timeout=10,
            enable_logging=False)

        if status_code == HTTPCodes.OK:
            logger.info("ODL mount point configured successfully.")
        elif status_code == HTTPCodes.CONFLICT:
            logger.info("ODL mount point was already configured.")
        else:
            raise HoneycombError('Mount point configuration not successful')

    @staticmethod
    def stop_odl_client(node, path):
        """Stop ODL client service on the specified node.

        :param node: Node to start ODL client on.
        :param path: Path to ODL client.
        :type node: dict
        :type path: str
        :raises HoneycombError: If ODL client fails to stop.
        """

        # Single SSH session for both the stop script and the fallback pkill
        # (the original created and connected a second, redundant session).
        ssh = SSH()
        ssh.connect(node)

        cmd = "{0}/*karaf*/bin/stop".format(path)

        ret_code, _, _ = ssh.exec_command_sudo(cmd)
        if int(ret_code) != 0:
            logger.debug("ODL Client refused to shut down.")
            cmd = "pkill -f 'karaf'"
            (ret_code, _, _) = ssh.exec_command_sudo(cmd)
            if int(ret_code) != 0:
                raise HoneycombError('Node {0} failed to stop ODL.'.
                                     format(node['host']))

        logger.info("ODL client service stopped.")

    @staticmethod
    def set_static_arp(node, ip_address, mac_address):
        """Configure a static ARP entry using arp.

        :param node: Node in topology.
        :param ip_address: IP address for the entry.
        :param mac_address: MAC address for the entry.
        :type node: dict
        :type ip_address: str
        :type mac_address: str
        :raises RuntimeError: If the operation fails.
        """

        ssh = SSH()
        ssh.connect(node)
        ret_code, _, _ = ssh.exec_command_sudo("arp -s {0} {1}".format(
            ip_address, mac_address))

        if ret_code != 0:
            # Typo "adddress" fixed in the error message.
            raise RuntimeError("Failed to configure static ARP address.")
677
678
class HoneycombStartupConfig(object):
    """Builds and deploys the Honeycomb startup shell script.

    Call the ``set_*`` methods to accumulate JVM and scheduler options,
    then :meth:`apply_config` to render the script and install it as
    ``/opt/honeycomb/honeycomb`` on a node.
    """
    def __init__(self):
        """Initializer."""

        self.template = """#!/bin/sh -
        STATUS=100

        while [ $STATUS -eq 100 ]
        do
          {java_call} -jar $(dirname $0)/{jar_filename}
          STATUS=$?
          echo "Honeycomb exited with status: $STATUS"
          if [ $STATUS -eq 100 ]
          then
            echo "Restarting..."
          fi
        done
        """

        self.java_call = "{scheduler} {affinity} java{jit_mode}{params}"

        # Accumulated fragments of the java invocation line.
        self.scheduler = ""
        self.core_affinity = ""
        self.jit_mode = ""
        self.params = ""
        self.numa = ""

        # Rendered script text and SSH session used to deploy it.
        self.config = ""
        self.ssh = SSH()

    def apply_config(self, node):
        """Generate configuration file /opt/honeycomb/honeycomb on the
        specified node.

        :param node: Honeycomb node.
        :type node: dict
        """

        self.ssh.connect(node)
        _, jar_name, _ = self.ssh.exec_command("ls /opt/honeycomb | grep .jar")

        invocation = self.java_call.format(
            scheduler=self.scheduler,
            affinity=self.core_affinity,
            jit_mode=self.jit_mode,
            params=self.params)
        self.config = self.template.format(
            java_call=invocation, jar_filename=jar_name)

        self.ssh.connect(node)
        deploy_cmd = "echo '{config}' > /tmp/honeycomb " \
                     "&& chmod +x /tmp/honeycomb " \
                     "&& sudo mv -f /tmp/honeycomb /opt/honeycomb".\
            format(config=self.config)
        self.ssh.exec_command(deploy_cmd)

    def set_cpu_scheduler(self, scheduler="FIFO"):
        """Use alternate CPU scheduler.

        Note: OTHER scheduler doesn't load-balance over isolcpus.

        :param scheduler: CPU scheduler to use.
        :type scheduler: str
        """

        # chrt flags: FIFO = First In First Out, RR = Round Robin,
        # OTHER = Ubuntu default.
        flag = {"FIFO": "-f 99", "RR": "-r 99", "OTHER": "-o"}[scheduler]
        self.scheduler = "chrt {0}".format(flag)

    def set_cpu_core_affinity(self, low, high=None):
        """Set core affinity for the honeycomb process and subprocesses.

        :param low: Lowest core ID number.
        :param high: Highest core ID number. Leave empty to use a single core.
        :type low: int
        :type high: int
        """

        top = high if high else low
        self.core_affinity = "taskset -c {low}-{high}".format(low=low, high=top)

    def set_jit_compiler_mode(self, jit_mode):
        """Set running mode for Java's JIT compiler.

        :param jit_mode: Desired JIT mode.
        :type jit_mode: str
        """

        # client = default; server = higher performance, longer warmup;
        # classic = JIT compiler disabled.
        self.jit_mode = {
            "client": " -client",
            "server": " -server",
            "classic": " -classic",
        }[jit_mode]

    def set_memory_size(self, mem_min, mem_max=None):
        """Set minimum and maximum memory use for the JVM.

        :param mem_min: Minimum amount of memory (MB).
        :param mem_max: Maximum amount of memory (MB). Default is 4 times
        minimum value.
        :type mem_min: int
        :type mem_max: int
        """

        upper = mem_max if mem_max else mem_min * 4
        self.params += " -Xms{min}m -Xmx{max}m".format(min=mem_min, max=upper)

    def set_metaspace_size(self, mem_min, mem_max=None):
        """Set minimum and maximum memory used for class metadata in the JVM.

        :param mem_min: Minimum metaspace size (MB).
        :param mem_max: Maximum metaspace size (MB). Default is 4 times
        minimum value.
        :type mem_min: int
        :type mem_max: int
        """

        upper = mem_max if mem_max else mem_min * 4
        self.params += " -XX:MetaspaceSize={min}m " \
                       "-XX:MaxMetaspaceSize={max}m".format(
                           min=mem_min, max=upper)

    def set_numa_optimization(self):
        """Use optimization of memory use and garbage collection for NUMA
        architectures."""

        self.params += " -XX:+UseNUMA -XX:+UseParallelGC"

    def set_ssh_security_provider(self):
        """Disables BouncyCastle for SSHD."""
        # Workaround for issue described in:
        # https://wiki.fd.io/view/Honeycomb/Releases/1609/Honeycomb_and_ODL

        self.params += " -Dorg.apache.sshd.registerBouncyCastle=false"