HC Test: update ODL startup in hc2vpp jobs
csit.git: resources/libraries/python/honeycomb/HoneycombSetup.py
# Copyright (c) 2017 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Implementation of keywords for Honeycomb setup."""

from json import loads

from ipaddress import IPv6Address, AddressValueError

from robot.api import logger

from resources.libraries.python.HTTPRequest import HTTPRequest, HTTPCodes, \
    HTTPRequestError
from resources.libraries.python.constants import Constants as Const
from resources.libraries.python.honeycomb.HoneycombUtil import HoneycombError
from resources.libraries.python.honeycomb.HoneycombUtil \
    import HoneycombUtil as HcUtil
from resources.libraries.python.ssh import SSH
from resources.libraries.python.topology import NodeType


class HoneycombSetup(object):
    """Implements keywords for Honeycomb setup.

    The keywords implemented in this class make it possible to:
    - start Honeycomb,
    - stop Honeycomb,
    - check the Honeycomb start-up state,
    - check the Honeycomb shutdown state,
    - add VPP to the topology.
    """

    def __init__(self):
        pass

    @staticmethod
    def start_honeycomb_on_duts(*nodes):
        """Start Honeycomb on specified DUT nodes.

        This keyword starts the Honeycomb service on specified DUTs.
        It only starts Honeycomb and does not check its startup state. Use the
        keyword "Check Honeycomb Startup State" to check if Honeycomb is up
        and running.
        Honeycomb must be installed in the "/opt" directory, otherwise the
        start will fail.

        :param nodes: List of nodes to start Honeycomb on.
        :type nodes: list
        :raises HoneycombError: If Honeycomb fails to start.
        """

        HoneycombSetup.print_environment(nodes)

        logger.console("\n(re)Starting Honeycomb service ...")

        cmd = "sudo service honeycomb start"

        for node in nodes:
            if node['type'] == NodeType.DUT:
                ssh = SSH()
                ssh.connect(node)
                (ret_code, _, _) = ssh.exec_command_sudo(cmd)
                if int(ret_code) != 0:
                    raise HoneycombError('Node {0} failed to start Honeycomb.'.
                                         format(node['host']))
                else:
                    logger.info("Starting the Honeycomb service on node {0} is "
                                "in progress ...".format(node['host']))

    @staticmethod
    def stop_honeycomb_on_duts(*nodes):
        """Stop the Honeycomb service on specified DUT nodes.

        This keyword stops the Honeycomb service on specified nodes. It only
        stops Honeycomb and does not check its shutdown state. Use the
        keyword "Check Honeycomb Shutdown State" to check if Honeycomb has
        stopped.

        :param nodes: List of nodes to stop Honeycomb on.
        :type nodes: list
        :raises HoneycombError: If Honeycomb fails to stop.
        """
        logger.console("\nShutting down Honeycomb service ...")

        cmd = "sudo service honeycomb stop"
        errors = []

        for node in nodes:
            if node['type'] == NodeType.DUT:
                ssh = SSH()
                ssh.connect(node)
                (ret_code, _, _) = ssh.exec_command_sudo(cmd)
                if int(ret_code) != 0:
                    errors.append(node['host'])
                else:
                    logger.info("Stopping the Honeycomb service on node {0} is "
                                "in progress ...".format(node['host']))
        if errors:
            raise HoneycombError('Node(s) {0} failed to stop Honeycomb.'.
                                 format(errors))

    @staticmethod
    def restart_honeycomb_on_dut(node):
        """Restart Honeycomb on the specified DUT node.

        This keyword restarts the Honeycomb service on the specified DUT. Use
        the keyword "Check Honeycomb Startup State" to check if Honeycomb is up
        and running.

        :param node: Node to restart Honeycomb on.
        :type node: dict
        :raises HoneycombError: If Honeycomb fails to start.
        """

        logger.console("\n(re)Starting Honeycomb service ...")

        cmd = "sudo service honeycomb restart"

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, _) = ssh.exec_command_sudo(cmd)
        if int(ret_code) != 0:
            raise HoneycombError('Node {0} failed to restart Honeycomb.'.
                                 format(node['host']))
        else:
            logger.info(
                "Honeycomb service restart is in progress on node {0}".format(
                    node['host']))

    @staticmethod
    def check_honeycomb_startup_state(*nodes):
        """Check state of Honeycomb service during startup on specified nodes.

        Reads the URL path from the template file oper_vpp_version.url.

        Honeycomb nodes reply with connection refused or the following status
        codes depending on startup progress: 200, 401, 403, 404, 500, 503.

        :param nodes: List of DUT nodes starting Honeycomb.
        :type nodes: list
        :return: True if all GETs returned code 200 (OK).
        :rtype: bool
        :raises HoneycombError: If Honeycomb is not yet ready or returns an
            unexpected status code.
        """
        path = HcUtil.read_path_from_url_file("oper_vpp_version")
        expected_status_codes = (HTTPCodes.UNAUTHORIZED,
                                 HTTPCodes.FORBIDDEN,
                                 HTTPCodes.NOT_FOUND,
                                 HTTPCodes.SERVICE_UNAVAILABLE,
                                 HTTPCodes.INTERNAL_SERVER_ERROR)

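        # These codes typically mean the web server is listening but RESTCONF
        # is not fully initialized yet; the keyword raises in that case so a
        # caller (e.g. Robot's "Wait Until Keyword Succeeds") can keep
        # retrying until 200 OK is returned.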
        for node in nodes:
            if node['type'] == NodeType.DUT:
                HoneycombSetup.print_ports(node)
                try:
                    status_code, _ = HTTPRequest.get(node, path,
                                                     enable_logging=False)
                except HTTPRequestError:
                    ssh = SSH()
                    ssh.connect(node)
                    ret_code, _, _ = ssh.exec_command_sudo(
                        "tail -n 100 /var/log/syslog")
                    if ret_code != 0:
                        # It's probably CentOS.
                        ssh.exec_command_sudo("tail -n 100 /var/log/messages")
                    raise
                if status_code == HTTPCodes.OK:
                    logger.info("Honeycomb on node {0} is up and running".
                                format(node['host']))
                elif status_code in expected_status_codes:
                    if status_code == HTTPCodes.UNAUTHORIZED:
                        logger.info('Unauthorized. If this triggers keyword '
                                    'timeout, verify Honeycomb username and '
                                    'password.')
                    raise HoneycombError('Honeycomb on node {0} running but '
                                         'not yet ready.'.format(node['host']),
                                         enable_logging=False)
                else:
                    raise HoneycombError('Unexpected return code: {0}.'.
                                         format(status_code))

                status_code, _ = HcUtil.get_honeycomb_data(
                    node, "config_vpp_interfaces")
                if status_code != HTTPCodes.OK:
                    raise HoneycombError('Honeycomb on node {0} running but '
                                         'not yet ready.'.format(node['host']),
                                         enable_logging=False)
        return True

    @staticmethod
    def check_honeycomb_shutdown_state(*nodes):
        """Check state of Honeycomb service during shutdown on specified nodes.

        Honeycomb nodes reply with connection refused or the following status
        codes depending on shutdown progress: 200, 404.

        :param nodes: List of DUT nodes stopping Honeycomb.
        :type nodes: list
        :return: True if all GETs fail to connect.
        :rtype: bool
        """
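        # "grep -v grep" excludes the grep process itself, so a zero return
        # code from this pipeline means a honeycomb process still exists.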
        cmd = "ps -ef | grep -v grep | grep honeycomb"
        for node in nodes:
            if node['type'] == NodeType.DUT:
                try:
                    status_code, _ = HTTPRequest.get(node, '/index.html',
                                                     enable_logging=False)
                    if status_code == HTTPCodes.OK:
                        raise HoneycombError('Honeycomb on node {0} is still '
                                             'running.'.format(node['host']),
                                             enable_logging=False)
                    elif status_code == HTTPCodes.NOT_FOUND:
                        raise HoneycombError('Honeycomb on node {0} is shutting'
                                             ' down.'.format(node['host']),
                                             enable_logging=False)
                    else:
                        raise HoneycombError('Unexpected return code: {0}.'.
                                             format(status_code))
                except HTTPRequestError:
                    logger.debug('Connection refused, checking the process '
                                 'state ...')
                    ssh = SSH()
                    ssh.connect(node)
                    (ret_code, _, _) = ssh.exec_command_sudo(cmd)
                    if ret_code == 0:
                        raise HoneycombError('Honeycomb on node {0} is still '
                                             'running.'.format(node['host']),
                                             enable_logging=False)
                    else:
                        logger.info("Honeycomb on node {0} has stopped".
                                    format(node['host']))
        return True

    @staticmethod
    def configure_restconf_binding_address(node):
        """Configure Honeycomb to accept restconf requests from all IP
        addresses. IP version is determined by node data.

        :param node: Information about a DUT node.
        :type node: dict
        :raises HoneycombError: If the configuration could not be changed.
        """

        find = "restconf-binding-address"
        try:
            IPv6Address(unicode(node["host"]))
            # if management IP of the node is in IPv6 format
            replace = '\\"restconf-binding-address\\": \\"0::0\\",'
        except (AttributeError, AddressValueError):
            replace = '\\"restconf-binding-address\\": \\"0.0.0.0\\",'

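        # Build a GNU sed "change" command: '/<pattern>/c\ <text>' replaces
        # every line matching <pattern> with <text>, so the whole
        # binding-address line in restconf.json is rewritten. The same idiom
        # is reused by the other configure_* keywords below.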
        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/restconf.json".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        if ret_code != 0:
            raise HoneycombError("Failed to modify configuration on "
                                 "node {0}, {1}".format(node, stderr))

    @staticmethod
    def configure_jvpp_timeout(node, timeout=10):
        """Configure timeout value for Java API commands Honeycomb sends to VPP.

        :param node: Information about a DUT node.
        :param timeout: Timeout value in seconds.
        :type node: dict
        :type timeout: int
        :raises HoneycombError: If the configuration could not be changed.
        """

        find = "jvpp-request-timeout"
        replace = '\\"jvpp-request-timeout\\": {0}'.format(timeout)

        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/jvpp.json".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        if ret_code != 0:
            raise HoneycombError("Failed to modify configuration on "
                                 "node {0}, {1}".format(node, stderr))

    @staticmethod
    def print_environment(nodes):
        """Print information about the nodes to log. The information is defined
        by commands in the cmds tuple at the beginning of this method.

        :param nodes: List of DUT nodes to get information about.
        :type nodes: list
        """

        # TODO: When everything is set and running in VIRL env, transform this
        # method to a keyword checking the environment.

        cmds = ("uname -a",
                "df -lh",
                "echo $JAVA_HOME",
                "echo $PATH",
                "which java",
                "java -version",
                "dpkg --list | grep openjdk",
                "ls -la /opt/honeycomb")

        for node in nodes:
            if node['type'] == NodeType.DUT:
                logger.info("Checking node {} ...".format(node['host']))
                for cmd in cmds:
                    logger.info("Command: {}".format(cmd))
                    ssh = SSH()
                    ssh.connect(node)
                    ssh.exec_command_sudo(cmd)

    @staticmethod
    def print_ports(node):
        """Use "sudo netstat -anp | grep java" to print the ports on which a
        Java application listens.

        :param node: Honeycomb node where we want to print the ports.
        :type node: dict
        """

        cmds = ("netstat -anp | grep java",
                "ps -ef | grep [h]oneycomb")

        logger.info("Checking node {} ...".format(node['host']))
        for cmd in cmds:
            logger.info("Command: {}".format(cmd))
            ssh = SSH()
            ssh.connect(node)
            ssh.exec_command_sudo(cmd)

    @staticmethod
    def configure_log_level(node, level):
        """Set Honeycomb logging to the specified level.

        :param node: Honeycomb node.
        :param level: Log level (INFO, DEBUG, TRACE).
        :type node: dict
        :type level: str
        :raises HoneycombError: If the configuration could not be changed.
        """

        find = 'logger name=\\"io.fd\\"'
        replace = '<logger name=\\"io.fd\\" level=\\"{0}\\"/>'.format(level)

        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/logback.xml".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        if ret_code != 0:
            raise HoneycombError("Failed to modify configuration on "
                                 "node {0}, {1}".format(node, stderr))

    @staticmethod
    def manage_honeycomb_features(node, feature, disable=False):
        """Configure Honeycomb to use features that are disabled by default, or
        disable previously enabled features.

        .. note:: If the module is not enabled in VPP, Honeycomb will
            be unable to establish a VPP connection.

        :param node: Honeycomb node.
        :param feature: Feature to enable.
        :param disable: Disable the specified feature instead of enabling it.
        :type node: dict
        :type feature: str
        :type disable: bool
        :raises HoneycombError: If the configuration could not be changed.
        """

        disabled_features = {
            "NSH": "io.fd.hc2vpp.vppnsh.impl.VppNshModule"
        }

        ssh = SSH()
        ssh.connect(node)

        if feature in disabled_features.keys():
            # uncomment by replacing the entire line
            find = replace = "{0}".format(disabled_features[feature])
            if disable:
                replace = "// {0}".format(find)

            argument = '"/{0}/c\\ {1}"'.format(find, replace)
            path = "{0}/modules/*module-config"\
                .format(Const.REMOTE_HC_DIR)
            command = "sed -i {0} {1}".format(argument, path)

            (ret_code, _, stderr) = ssh.exec_command_sudo(command)
            if ret_code != 0:
                raise HoneycombError("Failed to modify configuration on "
                                     "node {0}, {1}".format(node, stderr))
        else:
            raise HoneycombError(
                "Unrecognized feature {0}.".format(feature))

    @staticmethod
    def copy_java_libraries(node):
        """Copy Java libraries installed by the vpp-api-java package to the
        Honeycomb lib folder.

        This is a (temporary?) workaround for jvpp version mismatches.

        :param node: Honeycomb node.
        :type node: dict
        :raises HoneycombError: If the copy operation fails.
        """

        ssh = SSH()
        ssh.connect(node)
        (_, stdout, _) = ssh.exec_command_sudo(
            "ls /usr/share/java | grep ^jvpp-*")

        files = stdout.split("\n")[:-1]
        for item in files:
            # Example filenames:
            # jvpp-registry-17.04.jar
            # jvpp-core-17.04.jar

            parts = item.split("-")
            version = "{0}-SNAPSHOT".format(parts[2][:5])
            artifact_id = "{0}-{1}".format(parts[0], parts[1])

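            # E.g. "jvpp-registry-17.04.jar" is split into artifact_id
            # "jvpp-registry" and version "17.04-SNAPSHOT". The target
            # directory below appears to mirror the Maven-repository-style
            # layout (lib/io/fd/vpp/<artifact>/<version>/) that Honeycomb
            # loads its jars from.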
            directory = "{0}/lib/io/fd/vpp/{1}/{2}".format(
                Const.REMOTE_HC_DIR, artifact_id, version)
            cmd = "sudo mkdir -p {0}; " \
                  "sudo cp /usr/share/java/{1} {0}/{2}-{3}.jar".format(
                      directory, item, artifact_id, version)

            (ret_code, _, stderr) = ssh.exec_command(cmd)
            if ret_code != 0:
                raise HoneycombError("Failed to copy JVPP libraries on "
                                     "node {0}, {1}".format(node, stderr))

    @staticmethod
    def copy_odl_client(node, odl_name, src_path, dst_path):
        """Copy ODL client from source path to destination path.

        :param node: Honeycomb node.
        :param odl_name: Name of ODL client version to use.
        :param src_path: Source path where to find the ODL client.
        :param dst_path: Destination path.
        :type node: dict
        :type odl_name: str
        :type src_path: str
        :type dst_path: str
        :raises HoneycombError: If the operation fails.
        """

        ssh = SSH()
        ssh.connect(node)

        cmd = "sudo rm -rf {dst}/*karaf_{odl_name} && " \
              "cp -r {src}/*karaf_{odl_name}* {dst}".format(
                  src=src_path, odl_name=odl_name, dst=dst_path)

        ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=120)
        if int(ret_code) != 0:
            raise HoneycombError(
                "Failed to copy ODL client on node {0}".format(node["host"]))

    @staticmethod
    def setup_odl_client(node, path):
        """Start ODL client on the specified node.

        Karaf should be located in the provided path, and VPP and Honeycomb
        should already be running, otherwise the start will fail.

        :param node: Node to start ODL client on.
        :param path: Path to ODL client on node.
        :type node: dict
        :type path: str
        :raises HoneycombError: If the ODL client fails to start.
        """

        logger.console("\nStarting ODL client ...")
        ssh = SSH()
        ssh.connect(node)

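        # The shell glob {path}/*karaf*/ is expected to expand to the
        # extracted Karaf distribution directory; Karaf's "start clean"
        # wipes the previous instance data before starting.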
        cmd = "{path}/*karaf*/bin/start clean".format(path=path)
        ret_code, _, _ = ssh.exec_command_sudo(cmd)

        if int(ret_code) != 0:
            raise HoneycombError('Node {0} failed to start ODL.'.
                                 format(node['host']))
        else:
            logger.info("Starting the ODL client on node {0} is "
                        "in progress ...".format(node['host']))

    @staticmethod
    def install_odl_features(node, path, *features):
        """Install required features on a running ODL client.

        :param node: Honeycomb node.
        :param path: Path to ODL client on node.
        :param features: Optional, list of additional features to install.
        :type node: dict
        :type path: str
        :type features: list
        :raises HoneycombError: If the feature installation fails.
        """

        ssh = SSH()
        ssh.connect(node)

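        # Install features through the Karaf shell client; "-u karaf" logs in
        # as the default karaf user. The three ODL features below are always
        # installed; any extra features passed in are appended to the command.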
        cmd = "{path}/*karaf*/bin/client -u karaf feature:install " \
              "odl-restconf-all " \
              "odl-netconf-connector-all " \
              "odl-netconf-topology".format(path=path)
        for feature in features:
            cmd += " {0}".format(feature)

        ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=250)

        if int(ret_code) != 0:
            raise HoneycombError("Feature install did not succeed.")

    @staticmethod
    def check_odl_startup_state(node):
        """Check the status of ODL client startup.

        :param node: Honeycomb node.
        :type node: dict
        :returns: True when ODL is started.
        :rtype: bool
        :raises HoneycombError: If the ODL client is not yet ready or returns
            an unexpected status code.
        """

        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")
        expected_status_codes = (HTTPCodes.UNAUTHORIZED,
                                 HTTPCodes.FORBIDDEN,
                                 HTTPCodes.NOT_FOUND,
                                 HTTPCodes.SERVICE_UNAVAILABLE,
                                 HTTPCodes.INTERNAL_SERVER_ERROR)

        status_code, _ = HTTPRequest.get(node, path, timeout=10,
                                         enable_logging=False)
        if status_code == HTTPCodes.OK:
            logger.info("ODL client on node {0} is up and running".
                        format(node['host']))
        elif status_code in expected_status_codes:
            if status_code == HTTPCodes.UNAUTHORIZED:
                logger.info('Unauthorized. If this triggers keyword '
                            'timeout, verify username and password.')
            raise HoneycombError('ODL client on node {0} running but '
                                 'not yet ready.'.format(node['host']),
                                 enable_logging=False)
        else:
            raise HoneycombError('Unexpected return code: {0}.'.
                                 format(status_code))
        return True

    @staticmethod
    def check_odl_shutdown_state(node):
        """Check the status of ODL client shutdown.

        :param node: Honeycomb node.
        :type node: dict
        :returns: True when ODL is stopped.
        :rtype: bool
        :raises HoneycombError: When the ODL client is still running.
        """

        cmd = "pgrep -f karaf"
        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")

        try:
            HTTPRequest.get(node, path, timeout=10, enable_logging=False)
            raise HoneycombError("ODL client is still running.")
        except HTTPRequestError:
            logger.debug("Connection refused, checking process state ...")
            ssh = SSH()
            ssh.connect(node)
            ret_code, _, _ = ssh.exec_command(cmd)
            if ret_code == 0:
                raise HoneycombError("ODL client is still running.")

        return True

    @staticmethod
    def mount_honeycomb_on_odl(node):
        """Tell the ODL client to mount the Honeycomb instance over netconf.

        :param node: Honeycomb node.
        :type node: dict
        :raises HoneycombError: If the mount point could not be configured.
        """

        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")

        url_file = "{0}/{1}".format(Const.RESOURCES_TPL_HC,
                                    "odl_client/mount_honeycomb.json")

        with open(url_file) as template:
            data = template.read()

        data = loads(data)

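        # POST the mount-point definition to the ODL netconf-connector
        # RESTCONF path: 200 OK means the mount point was created, while
        # 409 CONFLICT means it already exists from a previous run.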
        status_code, _ = HTTPRequest.post(
            node,
            path,
            headers={"Content-Type": "application/json",
                     "Accept": "text/plain"},
            json=data,
            timeout=10,
            enable_logging=False)

        if status_code == HTTPCodes.OK:
            logger.info("ODL mount point configured successfully.")
        elif status_code == HTTPCodes.CONFLICT:
            logger.info("ODL mount point was already configured.")
        else:
            raise HoneycombError('Mount point configuration not successful.')

    @staticmethod
    def stop_odl_client(node, path):
        """Stop the ODL client service on the specified node.

        :param node: Node to stop ODL client on.
        :param path: Path to ODL client.
        :type node: dict
        :type path: str
        :raises HoneycombError: If the ODL client fails to stop.
        """

        ssh = SSH()
        ssh.connect(node)

        cmd = "{0}/*karaf*/bin/stop".format(path)

        ret_code, _, _ = ssh.exec_command_sudo(cmd)
        if int(ret_code) != 0:
            logger.debug("ODL client refused to shut down.")
            cmd = "pkill -f 'karaf'"
            (ret_code, _, _) = ssh.exec_command_sudo(cmd)
            if int(ret_code) != 0:
                raise HoneycombError('Node {0} failed to stop ODL.'.
                                     format(node['host']))

        logger.info("ODL client service stopped.")

    @staticmethod
    def stop_vpp_service(node):
        """Stop the VPP service on the specified node.

        :param node: VPP node.
        :type node: dict
        """

        ssh = SSH()
        ssh.connect(node)
        cmd = "service vpp stop"
        ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=80)
        if int(ret_code) != 0:
            logger.debug("VPP service refused to shut down.")


class HoneycombStartupConfig(object):
    """Generator for Honeycomb startup configuration."""

    def __init__(self):
        """Initializer."""

        self.template = """
        #!/bin/sh -
        STATUS=100

        while [ $STATUS -eq 100 ]
        do
          {java_call} -jar $(dirname $0)/{jar_filename}
          STATUS=$?
          echo "Honeycomb exited with status: $STATUS"
          if [ $STATUS -eq 100 ]
          then
            echo "Restarting..."
          fi
        done
        """
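        # The generated wrapper keeps relaunching Honeycomb for as long as the
        # JVM exits with status 100, which the loop treats as a "restart
        # requested" code; any other exit status ends the loop.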

        self.java_call = "{scheduler} {affinity} java{jit_mode}{params}"

        self.scheduler = ""
        self.core_affinity = ""
        self.jit_mode = ""
        self.params = ""
        self.numa = ""

        self.config = ""
        self.ssh = SSH()

    def apply_config(self, node):
        """Generate configuration file /opt/honeycomb/honeycomb on the
        specified node.

        :param node: Honeycomb node.
        :type node: dict
        """

        self.ssh.connect(node)
        _, filename, _ = self.ssh.exec_command("ls /opt/honeycomb | grep .jar")

        java_call = self.java_call.format(scheduler=self.scheduler,
                                          affinity=self.core_affinity,
                                          jit_mode=self.jit_mode,
                                          params=self.params)
        self.config = self.template.format(java_call=java_call,
                                           jar_filename=filename)

        self.ssh.connect(node)
        cmd = "echo '{config}' > /tmp/honeycomb " \
              "&& chmod +x /tmp/honeycomb " \
              "&& sudo mv -f /tmp/honeycomb /opt/honeycomb".\
            format(config=self.config)
        self.ssh.exec_command(cmd)

    def set_cpu_scheduler(self, scheduler="FIFO"):
        """Use alternate CPU scheduler.

        Note: OTHER scheduler doesn't load-balance over isolcpus.

        :param scheduler: CPU scheduler to use.
        :type scheduler: str
        """

        schedulers = {"FIFO": "-f 99",  # First In, First Out
                      "RR": "-r 99",  # Round Robin
                      "OTHER": "-o",  # Ubuntu default
                     }
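        # "chrt -f 99" / "chrt -r 99" prefix the java command so it runs with
        # real-time SCHED_FIFO or SCHED_RR policy at priority 99; "-o" keeps
        # the default SCHED_OTHER policy.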
        self.scheduler = "chrt {0}".format(schedulers[scheduler])

    def set_cpu_core_affinity(self, low, high=None):
        """Set core affinity for the Honeycomb process and subprocesses.

        :param low: Lowest core ID number.
        :param high: Highest core ID number. Leave empty to use a single core.
        :type low: int
        :type high: int
        """

        self.core_affinity = "taskset -c {low}-{high}".format(
            low=low, high=high if high else low)

    def set_jit_compiler_mode(self, jit_mode):
        """Set running mode for Java's JIT compiler.

        :param jit_mode: Desired JIT mode.
        :type jit_mode: str
        """

        modes = {"client": " -client",  # Default
                 "server": " -server",  # Higher performance but longer warmup
                 "classic": " -classic"  # Disables JIT compiler
                }

        self.jit_mode = modes[jit_mode]

    def set_memory_size(self, mem_min, mem_max=None):
        """Set minimum and maximum memory use for the JVM.

        :param mem_min: Minimum amount of memory (MB).
        :param mem_max: Maximum amount of memory (MB). Default is 4 times
            the minimum value.
        :type mem_min: int
        :type mem_max: int
        """

        self.params += " -Xms{min}m -Xmx{max}m".format(
            min=mem_min, max=mem_max if mem_max else mem_min*4)

    def set_metaspace_size(self, mem_min, mem_max=None):
        """Set minimum and maximum memory used for class metadata in the JVM.

        :param mem_min: Minimum metaspace size (MB).
        :param mem_max: Maximum metaspace size (MB). Default is 4 times
            the minimum value.
        :type mem_min: int
        :type mem_max: int
        """

        self.params += " -XX:MetaspaceSize={min}m " \
                       "-XX:MaxMetaspaceSize={max}m".format(
                           min=mem_min, max=mem_max if mem_max else mem_min*4)

    def set_numa_optimization(self):
        """Use optimization of memory use and garbage collection for NUMA
        architectures."""

        self.params += " -XX:+UseNUMA -XX:+UseParallelGC"

    def set_ssh_security_provider(self):
        """Disable BouncyCastle for SSHD."""
        # Workaround for the issue described in:
        # https://wiki.fd.io/view/Honeycomb/Releases/1609/Honeycomb_and_ODL

        self.params += " -Dorg.apache.sshd.registerBouncyCastle=false"
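

# Minimal usage sketch (illustration only; the shape of the node dict comes
# from the topology library and the values shown here are assumptions):
#
#     config = HoneycombStartupConfig()
#     config.set_cpu_scheduler("FIFO")
#     config.set_cpu_core_affinity(2, 3)
#     config.set_memory_size(512)
#     config.set_numa_optimization()
#     config.apply_config(node)  # writes the /opt/honeycomb/honeycomb wrapper
#     HoneycombSetup.start_honeycomb_on_duts(node)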