b5e964dd6c807de9e452588fa68d8ecb38520619
[csit.git] / resources / libraries / python / honeycomb / HoneycombSetup.py
1 # Copyright (c) 2017 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
5 #
6 #     http://www.apache.org/licenses/LICENSE-2.0
7 #
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
13
14 """Implementation of keywords for Honeycomb setup."""
15
16 from json import loads
17 from time import time, sleep
18
19 from ipaddress import IPv6Address, AddressValueError
20
21 from robot.api import logger
22
23 from resources.libraries.python.HTTPRequest import HTTPRequest, HTTPCodes, \
24     HTTPRequestError
25 from resources.libraries.python.constants import Constants as Const
26 from resources.libraries.python.honeycomb.HoneycombUtil import HoneycombError
27 from resources.libraries.python.honeycomb.HoneycombUtil \
28     import HoneycombUtil as HcUtil
29 from resources.libraries.python.ssh import SSH
30 from resources.libraries.python.topology import NodeType
31
32
class HoneycombSetup(object):
    """Implements keywords for Honeycomb setup.

    The keywords implemented in this class make possible to:
    - start Honeycomb,
    - stop Honeycomb,
    - check the Honeycomb start-up state,
    - check the Honeycomb shutdown state,
    - add VPP to the topology.
    """

    def __init__(self):
        pass

    @staticmethod
    def start_honeycomb_on_duts(*nodes):
        """Start Honeycomb on specified DUT nodes.

        This keyword starts the Honeycomb service on specified DUTs.
        The keyword just starts the Honeycomb and does not check its startup
        state. Use the keyword "Check Honeycomb Startup State" to check if the
        Honeycomb is up and running.
        Honeycomb must be installed in "/opt" directory, otherwise the start
        will fail.

        :param nodes: List of nodes to start Honeycomb on.
        :type nodes: list
        :raises HoneycombError: If Honeycomb fails to start.
        """

        HoneycombSetup.print_environment(nodes)

        logger.console("\n(re)Starting Honeycomb service ...")

        cmd = "sudo service honeycomb start"

        for node in nodes:
            if node['type'] == NodeType.DUT:
                ssh = SSH()
                ssh.connect(node)
                (ret_code, _, _) = ssh.exec_command_sudo(cmd)
                if int(ret_code) != 0:
                    raise HoneycombError('Node {0} failed to start Honeycomb.'.
                                         format(node['host']))
                else:
                    logger.info("Starting the Honeycomb service on node {0} is "
                                "in progress ...".format(node['host']))

    @staticmethod
    def stop_honeycomb_on_duts(*nodes):
        """Stop the Honeycomb service on specified DUT nodes.

        This keyword stops the Honeycomb service on specified nodes. It just
        stops the Honeycomb and does not check its shutdown state. Use the
        keyword "Check Honeycomb Shutdown State" to check if Honeycomb has
        stopped.

        :param nodes: List of nodes to stop Honeycomb on.
        :type nodes: list
        :raises HoneycombError: If Honeycomb failed to stop.
        """
        logger.console("\nShutting down Honeycomb service ...")

        cmd = "sudo service honeycomb stop"
        # Collect failing hosts so all nodes are attempted before raising.
        errors = []

        for node in nodes:
            if node['type'] == NodeType.DUT:
                ssh = SSH()
                ssh.connect(node)
                (ret_code, _, _) = ssh.exec_command_sudo(cmd)
                if int(ret_code) != 0:
                    errors.append(node['host'])
                else:
                    logger.info("Stopping the Honeycomb service on node {0} is "
                                "in progress ...".format(node['host']))
        if errors:
            raise HoneycombError('Node(s) {0} failed to stop Honeycomb.'.
                                 format(errors))

    @staticmethod
    def restart_honeycomb_on_dut(node):
        """Restart Honeycomb on specified DUT nodes.

        This keyword restarts the Honeycomb service on specified DUTs. Use the
        keyword "Check Honeycomb Startup State" to check if the Honeycomb is up
        and running.

        :param node: Node to restart Honeycomb on.
        :type node: dict
        :raises HoneycombError: If Honeycomb fails to start.
        """

        logger.console("\n(re)Starting Honeycomb service ...")

        cmd = "sudo service honeycomb restart"

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, _) = ssh.exec_command_sudo(cmd)
        if int(ret_code) != 0:
            raise HoneycombError('Node {0} failed to restart Honeycomb.'.
                                 format(node['host']))
        else:
            logger.info(
                "Honeycomb service restart is in progress on node {0}".format(
                    node['host']))

    @staticmethod
    def check_honeycomb_startup_state(node, timeout=360, retries=20,
                                      interval=15):
        """Repeatedly check the status of Honeycomb startup until it is fully
        started or until timeout or max retries is reached.

        :param node: Honeycomb node.
        :param timeout: Timeout value in seconds.
        :param retries: Max number of retries.
        :param interval: Interval between checks, in seconds.
        :type node: dict
        :type timeout: int
        :type retries: int
        :type interval: int
        :raises HoneycombError: If the Honeycomb process IP cannot be found,
        or if timeout or number of retries is exceeded.
        """

        ssh = SSH()
        ssh.connect(node)
        ret_code, pid, _ = ssh.exec_command("pgrep honeycomb")
        if ret_code != 0:
            raise HoneycombError("No process named 'honeycomb' found.")

        pid = int(pid)
        count = 0
        start = time()
        while time() - start < timeout and count < retries:
            count += 1
            # First look for the success message in syslog; only then probe
            # Restconf, as the REST layer comes up after the log entry appears.
            ret_code, _, _ = ssh.exec_command(
                " | ".join([
                    "sudo tail -n 1000 /var/log/syslog",
                    "grep {pid}".format(pid=pid),
                    "grep 'Honeycomb started successfully!'"])
            )
            if ret_code != 0:
                logger.debug(
                    "Attempt #{count} failed on log check.".format(
                        count=count))
                sleep(interval)
                continue
            status_code_version, _ = HcUtil.get_honeycomb_data(
                node, "oper_vpp_version")
            status_code_if_cfg, _ = HcUtil.get_honeycomb_data(
                node, "config_vpp_interfaces")
            status_code_if_oper, _ = HcUtil.get_honeycomb_data(
                node, "oper_vpp_interfaces")
            # All three Restconf endpoints must respond OK, including the
            # version endpoint (previously checked if_cfg twice by mistake).
            if status_code_version == HTTPCodes.OK\
                    and status_code_if_cfg == HTTPCodes.OK\
                    and status_code_if_oper == HTTPCodes.OK:
                logger.info("Check successful, Honeycomb is up and running.")
                break
            else:
                logger.debug(
                    "Attempt #{count} failed on Restconf check. Status codes:\n"
                    "Version: {version}\n"
                    "Interface config: {if_cfg}\n"
                    "Interface operational: {if_oper}".format(
                        count=count,
                        version=status_code_version,
                        if_cfg=status_code_if_cfg,
                        if_oper=status_code_if_oper))
                sleep(interval)
                continue
        else:
            # Loop exhausted without break: gather diagnostics and fail.
            _, vpp_status, _ = ssh.exec_command("service vpp status")
            ret_code, hc_log, _ = ssh.exec_command(
                " | ".join([
                    "sudo tail -n 1000 /var/log/syslog",
                    "grep {pid}".format(pid=pid)]))
            raise HoneycombError(
                "Timeout or max retries exceeded. Status of VPP:\n"
                "{vpp_status}\n"
                "Syslog entries filtered by Honeycomb's pid:\n"
                "{hc_log}".format(vpp_status=vpp_status, hc_log=hc_log))

    @staticmethod
    def check_honeycomb_shutdown_state(node):
        """Check state of Honeycomb service during shutdown on specified nodes.

        Honeycomb nodes reply with connection refused or the following status
        codes depending on shutdown progress: codes 200, 404.

        :param node: List of DUT nodes stopping Honeycomb.
        :type node: dict
        :returns: True if all GETs fail to connect.
        :rtype: bool
        :raises HoneycombError: If the Honeycomb process is still running.
        """
        cmd = "pgrep honeycomb"

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, _) = ssh.exec_command_sudo(cmd)
        if ret_code == 0:
            raise HoneycombError('Honeycomb on node {0} is still '
                                 'running.'.format(node['host']),
                                 enable_logging=False)
        else:
            logger.info("Honeycomb on node {0} has stopped".
                        format(node['host']))
        return True

    @staticmethod
    def configure_restconf_binding_address(node):
        """Configure Honeycomb to accept restconf requests from all IP
        addresses. IP version is determined by node data.

        :param node: Information about a DUT node.
        :type node: dict
        :raises HoneycombError: If the configuration could not be changed.
        """

        find = "restconf-binding-address"
        try:
            IPv6Address(unicode(node["host"]))
            # if management IP of the node is in IPv6 format
            replace = '\\"restconf-binding-address\\": \\"0::0\\",'
        except (AttributeError, AddressValueError):
            replace = '\\"restconf-binding-address\\": \\"0.0.0.0\\",'

        # sed "c" command: replace the whole matching line in restconf.json.
        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/restconf.json".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        if ret_code != 0:
            raise HoneycombError("Failed to modify configuration on "
                                 "node {0}, {1}".format(node, stderr))

    @staticmethod
    def configure_jvpp_timeout(node, timeout=10):
        """Configure timeout value for Java API commands Honeycomb sends to VPP.

        :param node: Information about a DUT node.
        :param timeout: Timeout value in seconds.
        :type node: dict
        :type timeout: int
        :raises HoneycombError: If the configuration could not be changed.
        """

        find = "jvpp-request-timeout"
        replace = '\\"jvpp-request-timeout\\": {0}'.format(timeout)

        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/jvpp.json".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        if ret_code != 0:
            raise HoneycombError("Failed to modify configuration on "
                                 "node {0}, {1}".format(node, stderr))

    @staticmethod
    def print_environment(nodes):
        """Print information about the nodes to log. The information is defined
        by commands in cmds tuple at the beginning of this method.

        :param nodes: List of DUT nodes to get information about.
        :type nodes: list
        """

        # TODO: When everything is set and running in VIRL env, transform this
        # method to a keyword checking the environment.

        cmds = ("uname -a",
                "df -lh",
                "echo $JAVA_HOME",
                "echo $PATH",
                "which java",
                "java -version",
                "dpkg --list | grep openjdk",
                "ls -la /opt/honeycomb")

        for node in nodes:
            if node['type'] == NodeType.DUT:
                logger.info("Checking node {} ...".format(node['host']))
                # Connect once per node instead of once per command.
                ssh = SSH()
                ssh.connect(node)
                for cmd in cmds:
                    logger.info("Command: {}".format(cmd))
                    ssh.exec_command_sudo(cmd)

    @staticmethod
    def print_ports(node):
        """Uses "sudo netstat -anp | grep java" to print port where a java
        application listens.

        :param node: Honeycomb node where we want to print the ports.
        :type node: dict
        """

        cmds = ("netstat -anp | grep java",
                "ps -ef | grep [h]oneycomb")

        logger.info("Checking node {} ...".format(node['host']))
        # Connect once and reuse the session for every command.
        ssh = SSH()
        ssh.connect(node)
        for cmd in cmds:
            logger.info("Command: {}".format(cmd))
            ssh.exec_command_sudo(cmd)

    @staticmethod
    def configure_log_level(node, level):
        """Set Honeycomb logging to the specified level.

        :param node: Honeycomb node.
        :param level: Log level (INFO, DEBUG, TRACE).
        :type node: dict
        :type level: str
        :raises HoneycombError: If the configuration could not be changed.
        """

        find = 'logger name=\\"io.fd\\"'
        replace = '<logger name=\\"io.fd\\" level=\\"{0}\\"/>'.format(level)

        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/logback.xml".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        if ret_code != 0:
            raise HoneycombError("Failed to modify configuration on "
                                 "node {0}, {1}".format(node, stderr))

    @staticmethod
    def manage_honeycomb_features(node, feature, disable=False):
        """Configure Honeycomb to use features that are disabled by default, or
        disable previously enabled features.

        ..Note:: If the module is not enabled in VPP, Honeycomb will
        be unable to establish VPP connection.

        :param node: Honeycomb node.
        :param feature: Feature to enable.
        :param disable: Disable the specified feature instead of enabling it.
        :type node: dict
        :type feature: string
        :type disable: bool
        :raises HoneycombError: If the configuration could not be changed,
        or if the feature is not recognized.
        """

        # Maps feature name to the module line in Honeycomb's module-config.
        disabled_features = {
            "NSH": "io.fd.hc2vpp.vppnsh.impl.VppNshModule"
        }

        ssh = SSH()
        ssh.connect(node)

        if feature in disabled_features.keys():
            # uncomment by replacing the entire line
            find = replace = "{0}".format(disabled_features[feature])
            if disable:
                replace = "// {0}".format(find)

            argument = '"/{0}/c\\ {1}"'.format(find, replace)
            path = "{0}/modules/*module-config"\
                .format(Const.REMOTE_HC_DIR)
            command = "sed -i {0} {1}".format(argument, path)

            (ret_code, _, stderr) = ssh.exec_command_sudo(command)
            if ret_code != 0:
                raise HoneycombError("Failed to modify configuration on "
                                     "node {0}, {1}".format(node, stderr))
        else:
            raise HoneycombError(
                "Unrecognized feature {0}.".format(feature))

    @staticmethod
    def copy_java_libraries(node):
        """Copy Java libraries installed by vpp-api-java package to honeycomb
        lib folder.

        This is a (temporary?) workaround for jvpp version mismatches.

        :param node: Honeycomb node
        :type node: dict
        :raises HoneycombError: If the copy operation fails.
        """

        ssh = SSH()
        ssh.connect(node)
        # Anchor the prefix explicitly; "^jvpp-*" would match zero or more
        # dashes and thus also names without the separator.
        (_, stdout, _) = ssh.exec_command_sudo(
            "ls /usr/share/java | grep ^jvpp-")

        files = stdout.split("\n")[:-1]
        for item in files:
            # example filenames:
            # jvpp-registry-17.04.jar
            # jvpp-core-17.04.jar

            parts = item.split("-")
            version = "{0}-SNAPSHOT".format(parts[2][:5])
            artifact_id = "{0}-{1}".format(parts[0], parts[1])

            directory = "{0}/lib/io/fd/vpp/{1}/{2}".format(
                Const.REMOTE_HC_DIR, artifact_id, version)
            cmd = "sudo mkdir -p {0}; " \
                  "sudo cp /usr/share/java/{1} {0}/{2}-{3}.jar".format(
                      directory, item, artifact_id, version)

            (ret_code, _, stderr) = ssh.exec_command(cmd)
            if ret_code != 0:
                raise HoneycombError("Failed to copy JVPP libraries on "
                                     "node {0}, {1}".format(node, stderr))

    @staticmethod
    def copy_odl_client(node, odl_name, src_path, dst_path):
        """Copy ODL Client from source path to destination path.

        :param node: Honeycomb node.
        :param odl_name: Name of ODL client version to use.
        :param src_path: Source Path where to find ODl client.
        :param dst_path: Destination path.
        :type node: dict
        :type odl_name: str
        :type src_path: str
        :type dst_path: str
        :raises HoneycombError: If the operation fails.
        """

        ssh = SSH()
        ssh.connect(node)

        cmd = "sudo rm -rf {dst}/*karaf_{odl_name} && " \
              "cp -r {src}/*karaf_{odl_name}* {dst}".format(
                  src=src_path, odl_name=odl_name, dst=dst_path)

        ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=180)
        if int(ret_code) != 0:
            raise HoneycombError(
                "Failed to copy ODL client on node {0}".format(node["host"]))

    @staticmethod
    def setup_odl_client(node, path):
        """Start ODL client on the specified node.

        Karaf should be located in the provided path, and VPP and Honeycomb
        should already be running, otherwise the start will fail.

        :param node: Node to start ODL client on.
        :param path: Path to ODL client on node.
        :type node: dict
        :type path: str
        :raises HoneycombError: If Honeycomb fails to start.
        """

        logger.console("\nStarting ODL client ...")
        ssh = SSH()
        ssh.connect(node)

        cmd = "{path}/*karaf*/bin/start clean".format(path=path)
        ret_code, _, _ = ssh.exec_command_sudo(cmd)

        if int(ret_code) != 0:
            raise HoneycombError('Node {0} failed to start ODL.'.
                                 format(node['host']))
        else:
            logger.info("Starting the ODL client on node {0} is "
                        "in progress ...".format(node['host']))

    @staticmethod
    def install_odl_features(node, path, *features):
        """Install required features on a running ODL client.

        :param node: Honeycomb node.
        :param path: Path to ODL client on node.
        :param features: Optional, list of additional features to install.
        :type node: dict
        :type path: str
        :type features: list
        :raises HoneycombError: If the feature installation fails.
        """

        ssh = SSH()
        ssh.connect(node)

        cmd = "{path}/*karaf*/bin/client -u karaf feature:install " \
              "odl-restconf-all " \
              "odl-netconf-connector-all " \
              "odl-netconf-topology".format(path=path)
        for feature in features:
            cmd += " {0}".format(feature)

        ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=250)

        if int(ret_code) != 0:
            raise HoneycombError("Feature install did not succeed.")

    @staticmethod
    def check_odl_startup_state(node):
        """Check the status of ODL client startup.

        :param node: Honeycomb node.
        :type node: dict
        :returns: True when ODL is started.
        :rtype: bool
        :raises HoneycombError: When the response is not code 200: OK.
        """

        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")
        # Codes the client returns while it is up but not fully initialized.
        expected_status_codes = (HTTPCodes.UNAUTHORIZED,
                                 HTTPCodes.FORBIDDEN,
                                 HTTPCodes.NOT_FOUND,
                                 HTTPCodes.SERVICE_UNAVAILABLE,
                                 HTTPCodes.INTERNAL_SERVER_ERROR)

        status_code, _ = HTTPRequest.get(node, path, timeout=10,
                                         enable_logging=False)
        if status_code == HTTPCodes.OK:
            logger.info("ODL client on node {0} is up and running".
                        format(node['host']))
        elif status_code in expected_status_codes:
            if status_code == HTTPCodes.UNAUTHORIZED:
                logger.info('Unauthorized. If this triggers keyword '
                            'timeout, verify username and password.')
            raise HoneycombError('ODL client on node {0} running but '
                                 'not yet ready.'.format(node['host']),
                                 enable_logging=False)
        else:
            raise HoneycombError('Unexpected return code: {0}.'.
                                 format(status_code))
        return True

    @staticmethod
    def check_odl_shutdown_state(node):
        """Check the status of ODL client shutdown.

        :param node: Honeycomb node.
        :type node: dict
        :returns: True when ODL is stopped.
        :rtype: bool
        :raises HoneycombError: When the response is not code 200: OK.
        """

        cmd = "pgrep -f karaf"
        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")

        try:
            HTTPRequest.get(node, path, timeout=10, enable_logging=False)
            raise HoneycombError("ODL client is still running.")
        except HTTPRequestError:
            logger.debug("Connection refused, checking process state....")
            ssh = SSH()
            ssh.connect(node)
            ret_code, _, _ = ssh.exec_command(cmd)
            if ret_code == 0:
                raise HoneycombError("ODL client is still running.")

        return True

    @staticmethod
    def mount_honeycomb_on_odl(node):
        """Tell ODL client to mount Honeycomb instance over netconf.

        :param node: Honeycomb node.
        :type node: dict
        :raises HoneycombError: When the response is not code 200: OK.
        """

        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")

        url_file = "{0}/{1}".format(Const.RESOURCES_TPL_HC,
                                    "odl_client/mount_honeycomb.json")

        with open(url_file) as template:
            data = template.read()

        data = loads(data)

        status_code, _ = HTTPRequest.post(
            node,
            path,
            headers={"Content-Type": "application/json",
                     "Accept": "text/plain"},
            json=data,
            timeout=10,
            enable_logging=False)

        if status_code == HTTPCodes.OK:
            logger.info("ODL mount point configured successfully.")
        elif status_code == HTTPCodes.CONFLICT:
            logger.info("ODL mount point was already configured.")
        else:
            raise HoneycombError('Mount point configuration not successful')

    @staticmethod
    def stop_odl_client(node, path):
        """Stop ODL client service on the specified node.

        :param node: Node to start ODL client on.
        :param path: Path to ODL client.
        :type node: dict
        :type path: str
        :raises HoneycombError: If ODL client fails to stop.
        """

        # Single SSH session is enough; the duplicate connect was removed.
        ssh = SSH()
        ssh.connect(node)

        cmd = "{0}/*karaf*/bin/stop".format(path)

        ret_code, _, _ = ssh.exec_command_sudo(cmd)
        if int(ret_code) != 0:
            # Graceful stop failed; fall back to killing the karaf process.
            logger.debug("ODL Client refused to shut down.")
            cmd = "pkill -f 'karaf'"
            (ret_code, _, _) = ssh.exec_command_sudo(cmd)
            if int(ret_code) != 0:
                raise HoneycombError('Node {0} failed to stop ODL.'.
                                     format(node['host']))

        logger.info("ODL client service stopped.")

    @staticmethod
    def stop_vpp_service(node):
        """Stop VPP service on the specified node.

        :param node: VPP node.
        :type node: dict
        :raises RuntimeError: If VPP fails to stop.
        """

        ssh = SSH()
        ssh.connect(node)
        cmd = "service vpp stop"
        ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=80)
        if int(ret_code) != 0:
            logger.debug("VPP service refused to shut down.")
673
class HoneycombStartupConfig(object):
    """Generator for Honeycomb startup configuration.
    """
    def __init__(self):
        """Initializer."""

        # Shell script template; restarts Honeycomb whenever it exits
        # with status 100 (Honeycomb's "please restart me" code).
        self.template = """
        #!/bin/sh -
        STATUS=100

        while [ $STATUS -eq 100 ]
        do
          {java_call} -jar $(dirname $0)/{jar_filename}
          STATUS=$?
          echo "Honeycomb exited with status: $STATUS"
          if [ $STATUS -eq 100 ]
          then
            echo "Restarting..."
          fi
        done
        """

        self.java_call = "{scheduler} {affinity} java{jit_mode}{params}"

        # Fragments assembled into java_call by apply_config().
        self.scheduler = ""
        self.core_affinity = ""
        self.jit_mode = ""
        self.params = ""
        self.numa = ""

        self.config = ""
        self.ssh = SSH()

    def apply_config(self, node):
        """Generate configuration file /opt/honeycomb/honeycomb on the specified
         node.

         :param node: Honeycomb node.
         :type node: dict
         """

        self.ssh.connect(node)
        # Escape the dot so grep matches a literal ".jar" suffix.
        _, filename, _ = self.ssh.exec_command(
            "ls /opt/honeycomb | grep '\\.jar'")
        # Strip the trailing newline so it does not end up inside the
        # generated shell script.
        filename = filename.strip()

        java_call = self.java_call.format(scheduler=self.scheduler,
                                          affinity=self.core_affinity,
                                          jit_mode=self.jit_mode,
                                          params=self.params)
        self.config = self.template.format(java_call=java_call,
                                           jar_filename=filename)

        cmd = "echo '{config}' > /tmp/honeycomb " \
              "&& chmod +x /tmp/honeycomb " \
              "&& sudo mv -f /tmp/honeycomb /opt/honeycomb".\
            format(config=self.config)
        self.ssh.exec_command(cmd)

    def set_cpu_scheduler(self, scheduler="FIFO"):
        """Use alternate CPU scheduler.

        Note: OTHER scheduler doesn't load-balance over isolcpus.

        :param scheduler: CPU scheduler to use.
        :type scheduler: str
        :raises KeyError: If the scheduler name is not recognized.
        """

        schedulers = {"FIFO": "-f 99",  # First In, First Out
                      "RR": "-r 99",  # Round Robin
                      "OTHER": "-o",  # Ubuntu default
                     }
        self.scheduler = "chrt {0}".format(schedulers[scheduler])

    def set_cpu_core_affinity(self, low, high=None):
        """Set core affinity for the honeycomb process and subprocesses.

        :param low: Lowest core ID number.
        :param high: Highest core ID number. Leave empty to use a single core.
        :type low: int
        :type high: int
        """

        # Compare against None so that core ID 0 is accepted as "high".
        self.core_affinity = "taskset -c {low}-{high}".format(
            low=low, high=high if high is not None else low)

    def set_jit_compiler_mode(self, jit_mode):
        """Set running mode for Java's JIT compiler.

        :param jit_mode: Desired JIT mode.
        :type jit_mode: str
        :raises KeyError: If the mode name is not recognized.
        """

        modes = {"client": " -client",  # Default
                 "server": " -server",  # Higher performance but longer warmup
                 "classic": " -classic"  # Disables JIT compiler
                }

        self.jit_mode = modes[jit_mode]

    def set_memory_size(self, mem_min, mem_max=None):
        """Set minimum and maximum memory use for the JVM.

        :param mem_min: Minimum amount of memory (MB).
        :param mem_max: Maximum amount of memory (MB). Default is 4 times
        minimum value.
        :type mem_min: int
        :type mem_max: int
        """

        self.params += " -Xms{min}m -Xmx{max}m".format(
            min=mem_min, max=mem_max if mem_max is not None else mem_min*4)

    def set_metaspace_size(self, mem_min, mem_max=None):
        """Set minimum and maximum memory used for class metadata in the JVM.

        :param mem_min: Minimum metaspace size (MB).
        :param mem_max: Maximum metaspace size (MB). Default is 4 times
        minimum value.
        :type mem_min: int
        :type mem_max: int
        """

        self.params += " -XX:MetaspaceSize={min}m " \
                       "-XX:MaxMetaspaceSize={max}m".format(
                           min=mem_min,
                           max=mem_max if mem_max is not None else mem_min*4)

    def set_numa_optimization(self):
        """Use optimization of memory use and garbage collection for NUMA
        architectures."""

        self.params += " -XX:+UseNUMA -XX:+UseParallelGC"

    def set_ssh_security_provider(self):
        """Disables BouncyCastle for SSHD."""
        # Workaround for issue described in:
        # https://wiki.fd.io/view/Honeycomb/Releases/1609/Honeycomb_and_ODL

        self.params += " -Dorg.apache.sshd.registerBouncyCastle=false"