HC Test: bugfixes for hc2vpp ODL jobs
[csit.git] / resources / libraries / python / honeycomb / HoneycombSetup.py
1 # Copyright (c) 2017 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
5 #
6 #     http://www.apache.org/licenses/LICENSE-2.0
7 #
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
13
14 """Implementation of keywords for Honeycomb setup."""
15
16 from ipaddress import IPv6Address, AddressValueError
17
18 from robot.api import logger
19
20 from resources.libraries.python.HTTPRequest import HTTPRequest, HTTPCodes, \
21     HTTPRequestError
22 from resources.libraries.python.constants import Constants as Const
23 from resources.libraries.python.honeycomb.HoneycombUtil import HoneycombError
24 from resources.libraries.python.honeycomb.HoneycombUtil \
25     import HoneycombUtil as HcUtil
26 from resources.libraries.python.ssh import SSH
27 from resources.libraries.python.topology import NodeType
28
29
30 class HoneycombSetup(object):
31     """Implements keywords for Honeycomb setup.
32
33     The keywords implemented in this class make possible to:
34     - start Honeycomb,
35     - stop Honeycomb,
36     - check the Honeycomb start-up state,
37     - check the Honeycomb shutdown state,
38     - add VPP to the topology.
39     """
40
41     def __init__(self):
42         pass
43
44     @staticmethod
45     def start_honeycomb_on_duts(*nodes):
46         """Start Honeycomb on specified DUT nodes.
47
48         This keyword starts the Honeycomb service on specified DUTs.
49         The keyword just starts the Honeycomb and does not check its startup
50         state. Use the keyword "Check Honeycomb Startup State" to check if the
51         Honeycomb is up and running.
52         Honeycomb must be installed in "/opt" directory, otherwise the start
53         will fail.
54         :param nodes: List of nodes to start Honeycomb on.
55         :type nodes: list
56         :raises HoneycombError: If Honeycomb fails to start.
57         """
58
59         HoneycombSetup.print_environment(nodes)
60
61         logger.console("\n(re)Starting Honeycomb service ...")
62
63         cmd = "sudo service honeycomb start"
64
65         for node in nodes:
66             if node['type'] == NodeType.DUT:
67                 ssh = SSH()
68                 ssh.connect(node)
69                 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
70                 if int(ret_code) != 0:
71                     raise HoneycombError('Node {0} failed to start Honeycomb.'.
72                                          format(node['host']))
73                 else:
74                     logger.info("Starting the Honeycomb service on node {0} is "
75                                 "in progress ...".format(node['host']))
76
77     @staticmethod
78     def stop_honeycomb_on_duts(*nodes):
79         """Stop the Honeycomb service on specified DUT nodes.
80
81         This keyword stops the Honeycomb service on specified nodes. It just
82         stops the Honeycomb and does not check its shutdown state. Use the
83         keyword "Check Honeycomb Shutdown State" to check if Honeycomb has
84         stopped.
85         :param nodes: List of nodes to stop Honeycomb on.
86         :type nodes: list
87         :raises HoneycombError: If Honeycomb failed to stop.
88         """
89         logger.console("\nShutting down Honeycomb service ...")
90
91         cmd = "sudo service honeycomb stop"
92         errors = []
93
94         for node in nodes:
95             if node['type'] == NodeType.DUT:
96                 ssh = SSH()
97                 ssh.connect(node)
98                 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
99                 if int(ret_code) != 0:
100                     errors.append(node['host'])
101                 else:
102                     logger.info("Stopping the Honeycomb service on node {0} is "
103                                 "in progress ...".format(node['host']))
104         if errors:
105             raise HoneycombError('Node(s) {0} failed to stop Honeycomb.'.
106                                  format(errors))
107
108     @staticmethod
109     def restart_honeycomb_on_dut(node):
110         """Restart Honeycomb on specified DUT nodes.
111
112         This keyword restarts the Honeycomb service on specified DUTs. Use the
113         keyword "Check Honeycomb Startup State" to check if the Honeycomb is up
114         and running.
115
116         :param node: Node to restart Honeycomb on.
117         :type node: dict
118         :raises HoneycombError: If Honeycomb fails to start.
119         """
120
121         logger.console("\n(re)Starting Honeycomb service ...")
122
123         cmd = "sudo service honeycomb restart"
124
125         ssh = SSH()
126         ssh.connect(node)
127         (ret_code, _, _) = ssh.exec_command_sudo(cmd)
128         if int(ret_code) != 0:
129             raise HoneycombError('Node {0} failed to restart Honeycomb.'.
130                                  format(node['host']))
131         else:
132             logger.info(
133                 "Honeycomb service restart is in progress on node {0}".format(
134                     node['host']))
135
136     @staticmethod
137     def check_honeycomb_startup_state(*nodes):
138         """Check state of Honeycomb service during startup on specified nodes.
139
140         Reads html path from template file oper_vpp_version.url.
141
142         Honeycomb nodes reply with connection refused or the following status
143         codes depending on startup progress: codes 200, 401, 403, 404, 500, 503
144
145         :param nodes: List of DUT nodes starting Honeycomb.
146         :type nodes: list
147         :return: True if all GETs returned code 200(OK).
148         :rtype bool
149         """
150         path = HcUtil.read_path_from_url_file("oper_vpp_version")
151         expected_status_codes = (HTTPCodes.UNAUTHORIZED,
152                                  HTTPCodes.FORBIDDEN,
153                                  HTTPCodes.NOT_FOUND,
154                                  HTTPCodes.SERVICE_UNAVAILABLE,
155                                  HTTPCodes.INTERNAL_SERVER_ERROR)
156
157         for node in nodes:
158             if node['type'] == NodeType.DUT:
159                 HoneycombSetup.print_ports(node)
160                 try:
161                     status_code, _ = HTTPRequest.get(node, path,
162                                                      enable_logging=False)
163                 except HTTPRequestError:
164                     ssh = SSH()
165                     ssh.connect(node)
166                     ret_code, _, _ = ssh.exec_command_sudo(
167                         "tail -n 100 /var/log/syslog")
168                     if ret_code != 0:
169                         # It's probably Centos
170                         ssh.exec_command_sudo("tail -n 100 /var/log/messages")
171                     raise
172                 if status_code == HTTPCodes.OK:
173                     logger.info("Honeycomb on node {0} is up and running".
174                                 format(node['host']))
175                 elif status_code in expected_status_codes:
176                     if status_code == HTTPCodes.UNAUTHORIZED:
177                         logger.info('Unauthorized. If this triggers keyword '
178                                     'timeout, verify Honeycomb username and '
179                                     'password.')
180                     raise HoneycombError('Honeycomb on node {0} running but '
181                                          'not yet ready.'.format(node['host']),
182                                          enable_logging=False)
183                 else:
184                     raise HoneycombError('Unexpected return code: {0}.'.
185                                          format(status_code))
186
187                 status_code, _ = HcUtil.get_honeycomb_data(
188                     node, "config_vpp_interfaces")
189                 if status_code != HTTPCodes.OK:
190                     raise HoneycombError('Honeycomb on node {0} running but '
191                                          'not yet ready.'.format(node['host']),
192                                          enable_logging=False)
193         return True
194
195     @staticmethod
196     def check_honeycomb_shutdown_state(*nodes):
197         """Check state of Honeycomb service during shutdown on specified nodes.
198
199         Honeycomb nodes reply with connection refused or the following status
200         codes depending on shutdown progress: codes 200, 404.
201
202         :param nodes: List of DUT nodes stopping Honeycomb.
203         :type nodes: list
204         :return: True if all GETs fail to connect.
205         :rtype bool
206         """
207         cmd = "ps -ef | grep -v grep | grep honeycomb"
208         for node in nodes:
209             if node['type'] == NodeType.DUT:
210                 try:
211                     status_code, _ = HTTPRequest.get(node, '/index.html',
212                                                      enable_logging=False)
213                     if status_code == HTTPCodes.OK:
214                         raise HoneycombError('Honeycomb on node {0} is still '
215                                              'running.'.format(node['host']),
216                                              enable_logging=False)
217                     elif status_code == HTTPCodes.NOT_FOUND:
218                         raise HoneycombError('Honeycomb on node {0} is shutting'
219                                              ' down.'.format(node['host']),
220                                              enable_logging=False)
221                     else:
222                         raise HoneycombError('Unexpected return code: {0}.'.
223                                              format(status_code))
224                 except HTTPRequestError:
225                     logger.debug('Connection refused, checking the process '
226                                  'state ...')
227                     ssh = SSH()
228                     ssh.connect(node)
229                     (ret_code, _, _) = ssh.exec_command_sudo(cmd)
230                     if ret_code == 0:
231                         raise HoneycombError('Honeycomb on node {0} is still '
232                                              'running.'.format(node['host']),
233                                              enable_logging=False)
234                     else:
235                         logger.info("Honeycomb on node {0} has stopped".
236                                     format(node['host']))
237         return True
238
239     @staticmethod
240     def configure_restconf_binding_address(node):
241         """Configure Honeycomb to accept restconf requests from all IP
242         addresses. IP version is determined by node data.
243
244          :param node: Information about a DUT node.
245          :type node: dict
246          :raises HoneycombError: If the configuration could not be changed.
247          """
248
249         find = "restconf-binding-address"
250         try:
251             IPv6Address(unicode(node["host"]))
252             # if management IP of the node is in IPv6 format
253             replace = '\\"restconf-binding-address\\": \\"0::0\\",'
254         except (AttributeError, AddressValueError):
255             replace = '\\"restconf-binding-address\\": \\"0.0.0.0\\",'
256
257         argument = '"/{0}/c\\ {1}"'.format(find, replace)
258         path = "{0}/config/honeycomb.json".format(Const.REMOTE_HC_DIR)
259         command = "sed -i {0} {1}".format(argument, path)
260
261         ssh = SSH()
262         ssh.connect(node)
263         (ret_code, _, stderr) = ssh.exec_command_sudo(command)
264         if ret_code != 0:
265             raise HoneycombError("Failed to modify configuration on "
266                                  "node {0}, {1}".format(node, stderr))
267
268     @staticmethod
269     def configure_jvpp_timeout(node, timeout=10):
270         """Configure timeout value for Java API commands Honeycomb sends to VPP.
271
272          :param node: Information about a DUT node.
273          :param timeout: Timeout value in seconds.
274          :type node: dict
275          :type timeout: int
276          :raises HoneycombError: If the configuration could not be changed.
277          """
278
279         find = "jvpp-request-timeout"
280         replace = '\\"jvpp-request-timeout\\": {0}'.format(timeout)
281
282         argument = '"/{0}/c\\ {1}"'.format(find, replace)
283         path = "{0}/config/jvpp.json".format(Const.REMOTE_HC_DIR)
284         command = "sed -i {0} {1}".format(argument, path)
285
286         ssh = SSH()
287         ssh.connect(node)
288         (ret_code, _, stderr) = ssh.exec_command_sudo(command)
289         if ret_code != 0:
290             raise HoneycombError("Failed to modify configuration on "
291                                  "node {0}, {1}".format(node, stderr))
292
293     @staticmethod
294     def print_environment(nodes):
295         """Print information about the nodes to log. The information is defined
296         by commands in cmds tuple at the beginning of this method.
297
298         :param nodes: List of DUT nodes to get information about.
299         :type nodes: list
300         """
301
302         # TODO: When everything is set and running in VIRL env, transform this
303         # method to a keyword checking the environment.
304
305         cmds = ("uname -a",
306                 "df -lh",
307                 "echo $JAVA_HOME",
308                 "echo $PATH",
309                 "which java",
310                 "java -version",
311                 "dpkg --list | grep openjdk",
312                 "ls -la /opt/honeycomb")
313
314         for node in nodes:
315             if node['type'] == NodeType.DUT:
316                 logger.info("Checking node {} ...".format(node['host']))
317                 for cmd in cmds:
318                     logger.info("Command: {}".format(cmd))
319                     ssh = SSH()
320                     ssh.connect(node)
321                     ssh.exec_command_sudo(cmd)
322
323     @staticmethod
324     def print_ports(node):
325         """Uses "sudo netstat -anp | grep java" to print port where a java
326         application listens.
327
328         :param node: Honeycomb node where we want to print the ports.
329         :type node: dict
330         """
331
332         cmds = ("netstat -anp | grep java",
333                 "ps -ef | grep [h]oneycomb")
334
335         logger.info("Checking node {} ...".format(node['host']))
336         for cmd in cmds:
337             logger.info("Command: {}".format(cmd))
338             ssh = SSH()
339             ssh.connect(node)
340             ssh.exec_command_sudo(cmd)
341
342     @staticmethod
343     def configure_log_level(node, level):
344         """Set Honeycomb logging to the specified level.
345
346         :param node: Honeycomb node.
347         :param level: Log level (INFO, DEBUG, TRACE).
348         :type node: dict
349         :type level: str
350         """
351
352         find = 'logger name=\\"io.fd\\"'
353         replace = '<logger name=\\"io.fd\\" level=\\"{0}\\"/>'.format(level)
354
355         argument = '"/{0}/c\\ {1}"'.format(find, replace)
356         path = "{0}/config/logback.xml".format(Const.REMOTE_HC_DIR)
357         command = "sed -i {0} {1}".format(argument, path)
358
359         ssh = SSH()
360         ssh.connect(node)
361         (ret_code, _, stderr) = ssh.exec_command_sudo(command)
362         if ret_code != 0:
363             raise HoneycombError("Failed to modify configuration on "
364                                  "node {0}, {1}".format(node, stderr))
365
366     @staticmethod
367     def manage_honeycomb_features(node, feature, disable=False):
368         """Configure Honeycomb to use features that are disabled by default, or
369         disable previously enabled features.
370
371         ..Note:: If the module is not enabled in VPP, Honeycomb will
372         be unable to establish VPP connection.
373
374         :param node: Honeycomb node.
375         :param feature: Feature to enable.
376         :param disable: Disable the specified feature instead of enabling it.
377         :type node: dict
378         :type feature: string
379         :type disable: bool
380         :raises HoneycombError: If the configuration could not be changed.
381          """
382
383         disabled_features = {
384             "NSH": "io.fd.hc2vpp.vppnsh.impl.VppNshModule"
385         }
386
387         ssh = SSH()
388         ssh.connect(node)
389
390         if feature in disabled_features.keys():
391             # uncomment by replacing the entire line
392             find = replace = "{0}".format(disabled_features[feature])
393             if disable:
394                 replace = "// {0}".format(find)
395
396             argument = '"/{0}/c\\ {1}"'.format(find, replace)
397             path = "{0}/modules/*module-config"\
398                 .format(Const.REMOTE_HC_DIR)
399             command = "sed -i {0} {1}".format(argument, path)
400
401             (ret_code, _, stderr) = ssh.exec_command_sudo(command)
402             if ret_code != 0:
403                 raise HoneycombError("Failed to modify configuration on "
404                                      "node {0}, {1}".format(node, stderr))
405         else:
406             raise HoneycombError(
407                 "Unrecognized feature {0}.".format(feature))
408
409     @staticmethod
410     def copy_java_libraries(node):
411         """Copy Java libraries installed by vpp-api-java package to honeycomb
412         lib folder.
413
414         This is a (temporary?) workaround for jvpp version mismatches.
415
416         :param node: Honeycomb node
417         :type node: dict
418         """
419
420         ssh = SSH()
421         ssh.connect(node)
422         (_, stdout, _) = ssh.exec_command_sudo(
423             "ls /usr/share/java | grep ^jvpp-*")
424
425         files = stdout.split("\n")[:-1]
426         for item in files:
427             # example filenames:
428             # jvpp-registry-17.04.jar
429             # jvpp-core-17.04.jar
430
431             parts = item.split("-")
432             version = "{0}-SNAPSHOT".format(parts[2][:5])
433             artifact_id = "{0}-{1}".format(parts[0], parts[1])
434
435             directory = "{0}/lib/io/fd/vpp/{1}/{2}".format(
436                 Const.REMOTE_HC_DIR, artifact_id, version)
437             cmd = "sudo mkdir -p {0}; " \
438                   "sudo cp /usr/share/java/{1} {0}/{2}-{3}.jar".format(
439                       directory, item, artifact_id, version)
440
441             (ret_code, _, stderr) = ssh.exec_command(cmd)
442             if ret_code != 0:
443                 raise HoneycombError("Failed to copy JVPP libraries on "
444                                      "node {0}, {1}".format(node, stderr))
445
446     @staticmethod
447     def copy_odl_client(node, odl_name, src_path, dst_path):
448         """Copy ODL Client from source path to destination path.
449
450         :param node: Honeycomb node.
451         :param odl_name: Name of ODL client version to use.
452         :param src_path: Source Path where to find ODl client.
453         :param dst_path: Destination path.
454         :type node: dict
455         :type odl_name: str
456         :type src_path: str
457         :type dst_path: str
458         :raises HoneycombError: If the operation fails.
459         """
460
461         ssh = SSH()
462         ssh.connect(node)
463
464         cmd = "cp -r {src}/*karaf_{odl_name}* {dst}".format(
465             src=src_path, odl_name=odl_name, dst=dst_path)
466
467         ret_code, _, _ = ssh.exec_command(cmd, timeout=60)
468         if int(ret_code) != 0:
469             raise HoneycombError(
470                 "Failed to copy ODL client on node {0}".format(node["host"]))
471
472     @staticmethod
473     def setup_odl_client(node, path):
474         """Start ODL client on the specified node.
475
476         Karaf should be located in the provided path, and VPP and Honeycomb
477         should already be running, otherwise the start will fail.
478         :param node: Node to start ODL client on.
479         :param path: Path to ODL client on node.
480         :type node: dict
481         :type path: str
482         :raises HoneycombError: If Honeycomb fails to start.
483         """
484
485         logger.console("\nStarting ODL client ...")
486         ssh = SSH()
487         ssh.connect(node)
488
489         cmd = "{path}/*karaf*/bin/start clean".format(path=path)
490         ret_code, _, _ = ssh.exec_command_sudo(cmd)
491
492         if int(ret_code) != 0:
493             raise HoneycombError('Node {0} failed to start ODL.'.
494                                  format(node['host']))
495         else:
496             logger.info("Starting the ODL client on node {0} is "
497                         "in progress ...".format(node['host']))
498
499     @staticmethod
500     def install_odl_features(node, path, *features):
501         """Install required features on a running ODL client.
502
503         :param node: Honeycomb node.
504         :param path: Path to ODL client on node.
505         :param features: Optional, list of additional features to install.
506         :type node: dict
507         :type path: str
508         :type features: list
509         """
510
511         ssh = SSH()
512         ssh.connect(node)
513
514         cmd = "{path}/*karaf*/bin/client -u karaf feature:install " \
515               "odl-restconf-all odl-netconf-connector-all".format(path=path)
516         for feature in features:
517             cmd += " {0}".format(feature)
518
519         ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=120)
520
521         if int(ret_code) != 0:
522             raise HoneycombError("Feature install did not succeed.")
523
524     @staticmethod
525     def check_odl_startup_state(node):
526         """Check the status of ODL client startup.
527
528         :param node: Honeycomb node.
529         :param node: dict
530         :returns: True when ODL is started.
531         :rtype: bool
532         :raises HoneycombError: When the response is not code 200: OK.
533         """
534
535         path = HcUtil.read_path_from_url_file(
536             "odl_client/odl_netconf_connector")
537         expected_status_codes = (HTTPCodes.UNAUTHORIZED,
538                                  HTTPCodes.FORBIDDEN,
539                                  HTTPCodes.NOT_FOUND,
540                                  HTTPCodes.SERVICE_UNAVAILABLE,
541                                  HTTPCodes.INTERNAL_SERVER_ERROR)
542
543         status_code, _ = HTTPRequest.get(node, path, timeout=10,
544                                          enable_logging=False)
545         if status_code == HTTPCodes.OK:
546             logger.info("ODL client on node {0} is up and running".
547                         format(node['host']))
548         elif status_code in expected_status_codes:
549             if status_code == HTTPCodes.UNAUTHORIZED:
550                 logger.info('Unauthorized. If this triggers keyword '
551                             'timeout, verify username and password.')
552             raise HoneycombError('ODL client on node {0} running but '
553                                  'not yet ready.'.format(node['host']),
554                                  enable_logging=False)
555         else:
556             raise HoneycombError('Unexpected return code: {0}.'.
557                                  format(status_code))
558         return True
559
560     @staticmethod
561     def check_odl_shutdown_state(node):
562         """Check the status of ODL client shutdown.
563
564         :param node: Honeycomb node.
565         :type node: dict
566         :returns: True when ODL is stopped.
567         :rtype: bool
568         :raises HoneycombError: When the response is not code 200: OK.
569         """
570
571         cmd = "pgrep -f karaf"
572         path = HcUtil.read_path_from_url_file(
573             "odl_client/odl_netconf_connector")
574
575         try:
576             status_code, _ = HTTPRequest.get(node, path, timeout=10,
577                                              enable_logging=False)
578             raise HoneycombError("ODL client is still running.")
579         except HTTPRequestError:
580             logger.debug("Connection refused, checking process state....")
581             ssh = SSH()
582             ssh.connect(node)
583             ret_code, _, _ = ssh.exec_command(cmd)
584             if ret_code == 0:
585                 raise HoneycombError("ODL client is still running.")
586
587         return True
588
589     @staticmethod
590     def mount_honeycomb_on_odl(node):
591         """Tell ODL client to mount Honeycomb instance over netconf.
592
593         :param node: Honeycomb node.
594         :type node: dict
595         :raises HoneycombError: When the response is not code 200: OK.
596         """
597
598         path = HcUtil.read_path_from_url_file(
599             "odl_client/odl_netconf_connector")
600
601         url_file = "{0}/{1}".format(Const.RESOURCES_TPL_HC,
602                                     "odl_client/mount_honeycomb.xml")
603
604         with open(url_file) as template:
605             data = template.read()
606
607         status_code, _ = HTTPRequest.post(
608             node, path, headers={"Content-Type": "application/xml"},
609             payload=data, timeout=10, enable_logging=False)
610
611         if status_code == HTTPCodes.OK:
612             logger.info("ODL mount point configured successfully.")
613         elif status_code == HTTPCodes.CONFLICT:
614             logger.info("ODL mount point was already configured.")
615         else:
616             raise HoneycombError('Mount point configuration not successful')
617
618     @staticmethod
619     def stop_odl_client(node, path):
620         """Stop ODL client service on the specified node.
621
622         :param node: Node to start ODL client on.
623         :param path: Path to ODL client.
624         :type node: dict
625         :type path: str
626         :raises HoneycombError: If ODL client fails to stop.
627         """
628
629         ssh = SSH()
630         ssh.connect(node)
631
632         cmd = "{0}/*karaf*/bin/stop".format(path)
633
634         ssh = SSH()
635         ssh.connect(node)
636         ret_code, _, _ = ssh.exec_command_sudo(cmd)
637         if int(ret_code) != 0:
638             logger.debug("ODL Client refused to shut down.")
639             cmd = "pkill -f 'karaf'"
640             (ret_code, _, _) = ssh.exec_command_sudo(cmd)
641             if int(ret_code) != 0:
642                 raise HoneycombError('Node {0} failed to stop ODL.'.
643                                      format(node['host']))
644
645         logger.info("ODL client service stopped.")
646
    @staticmethod
    def stop_vpp_service(node):
        """Stop VPP service on the specified node.

        Note: a failure to stop is only logged at debug level; no exception
        is raised.

        :param node: VPP node.
        :type node: dict
        """

        ssh = SSH()
        ssh.connect(node)
        cmd = "service vpp stop"
        # Stopping VPP can take a long time when many interfaces are
        # configured, hence the generous timeout.
        ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=80)
        if int(ret_code) != 0:
            logger.debug("VPP service refused to shut down.")
662
663 class HoneycombStartupConfig(object):
664     """Generator for Honeycomb startup configuration.
665     """
    def __init__(self):
        """Initializer."""

        # Shell script template; re-launches Honeycomb whenever it exits
        # with status 100 (its restart-requested exit code).
        self.template = """
        #!/bin/sh -
        STATUS=100

        while [ $STATUS -eq 100 ]
        do
          {java_call} -jar $(dirname $0)/{jar_filename}
          STATUS=$?
          echo "Honeycomb exited with status: $STATUS"
          if [ $STATUS -eq 100 ]
          then
            echo "Restarting..."
          fi
        done
        """

        # Template for the java invocation line inside the script above.
        self.java_call = "{scheduler} {affinity} java {jit_mode} {params}"

        # Fragments filled in by the set_* methods below.
        self.scheduler = ""
        self.core_affinity = ""
        self.jit_mode = ""
        self.params = ""
        self.numa = ""

        # Rendered startup script, produced by apply_config().
        self.config = ""
        self.ssh = SSH()
695
696     def apply_config(self, node):
697         """Generate configuration file /opt/honeycomb/honeycomb on the specified
698          node.
699
700          :param node: Honeycomb node.
701          :type node: dict
702          """
703
704         self.ssh.connect(node)
705         _, filename, _ = self.ssh.exec_command("ls /opt/honeycomb | grep .jar")
706
707         java_call = self.java_call.format(scheduler=self.scheduler,
708                                           affinity=self.core_affinity,
709                                           jit_mode=self.jit_mode,
710                                           params=self.params)
711         self.config = self.template.format(java_call=java_call,
712                                            jar_filename=filename)
713
714         self.ssh.connect(node)
715         cmd = "echo '{config}' > /tmp/honeycomb " \
716               "&& chmod +x /tmp/honeycomb " \
717               "&& sudo mv -f /tmp/honeycomb /opt/honeycomb".format(
718                 config=self.config)
719         self.ssh.exec_command(cmd)
720
721     def set_cpu_scheduler(self, scheduler="FIFO"):
722         """Use alternate CPU scheduler.
723
724         Note: OTHER scheduler doesn't load-balance over isolcpus.
725
726         :param scheduler: CPU scheduler to use.
727         :type scheduler: str
728         """
729
730         schedulers = {"FIFO": "-f 99",  # First In, First Out
731                       "RR": "-r 99",  # Round Robin
732                       "OTHER": "-o",  # Ubuntu default
733                      }
734         self.scheduler = "chrt {0}".format(schedulers[scheduler])
735
736     def set_cpu_core_affinity(self, low, high=None):
737         """Set core affinity for the honeycomb process and subprocesses.
738
739         :param low: Lowest core ID number.
740         :param high: Highest core ID number. Leave empty to use a single core.
741         :type low: int
742         :type high: int
743         """
744
745         self.core_affinity = "taskset -c {low}-{high}".format(
746             low=low, high=high if high else low)
747
748     def set_jit_compiler_mode(self, jit_mode):
749         """Set running mode for Java's JIT compiler.
750
751         :param jit_mode: Desiret JIT mode.
752         :type jit_mode: str
753         """
754
755         modes = {"client": "-client",  # Default
756                  "server": "-server",  # Higher performance but longer warmup
757                  "classic": "-classic"  # Disables JIT compiler
758                 }
759
760         self.jit_mode = modes[jit_mode]
761
762     def set_memory_size(self, mem_min, mem_max=None):
763         """Set minimum and maximum memory use for the JVM.
764
765         :param mem_min: Minimum amount of memory (MB).
766         :param mem_max: Maximum amount of memory (MB). Default is 4 times
767         minimum value.
768         :type mem_min: int
769         :type mem_max: int
770         """
771
772         self.params += " -Xms{min}m -Xmx{max}m".format(
773             min=mem_min, max=mem_max if mem_max else mem_min*4)
774
775     def set_metaspace_size(self, mem_min, mem_max=None):
776         """Set minimum and maximum memory used for class metadata in the JVM.
777
778         :param mem_min: Minimum metaspace size (MB).
779         :param mem_max: Maximum metaspace size (MB). Defailt is 4 times
780         minimum value.
781         :type mem_min: int
782         :type mem_max: int
783         """
784
785         self.params += " -XX:MetaspaceSize={min}m " \
786                        "-XX:MaxMetaspaceSize={max}m".format(
787                            min=mem_min, max=mem_max if mem_max else mem_min*4)
788
789     def set_numa_optimization(self):
790         """Use optimization of memory use and garbage collection for NUMA
791         architectures."""
792
793         self.params += " -XX:+UseNUMA -XX:+UseParallelGC"