HC Test: Update list of Honeycomb BGP modules
[csit.git] / resources / libraries / python / honeycomb / HoneycombSetup.py
1 # Copyright (c) 2017 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
5 #
6 #     http://www.apache.org/licenses/LICENSE-2.0
7 #
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
13
14 """Implementation of keywords for Honeycomb setup."""
15
16 from json import loads
17 from time import time, sleep
18
19 from ipaddress import IPv6Address, AddressValueError
20
21 from robot.api import logger
22
23 from resources.libraries.python.HTTPRequest import HTTPRequest, HTTPCodes, \
24     HTTPRequestError
25 from resources.libraries.python.constants import Constants as Const
26 from resources.libraries.python.honeycomb.HoneycombUtil import HoneycombError
27 from resources.libraries.python.honeycomb.HoneycombUtil \
28     import HoneycombUtil as HcUtil
29 from resources.libraries.python.ssh import SSH
30 from resources.libraries.python.topology import NodeType
31
32
class HoneycombSetup(object):
    """Implements keywords for Honeycomb setup.

    The keywords implemented in this class make possible to:
    - start Honeycomb,
    - stop Honeycomb,
    - check the Honeycomb start-up state,
    - check the Honeycomb shutdown state,
    - add VPP to the topology.
    """

    def __init__(self):
        # All keywords are static methods; no instance state is needed.
        pass
47     @staticmethod
48     def start_honeycomb_on_duts(*nodes):
49         """Start Honeycomb on specified DUT nodes.
50
51         This keyword starts the Honeycomb service on specified DUTs.
52         The keyword just starts the Honeycomb and does not check its startup
53         state. Use the keyword "Check Honeycomb Startup State" to check if the
54         Honeycomb is up and running.
55         Honeycomb must be installed in "/opt" directory, otherwise the start
56         will fail.
57         :param nodes: List of nodes to start Honeycomb on.
58         :type nodes: list
59         :raises HoneycombError: If Honeycomb fails to start.
60         """
61
62         HoneycombSetup.print_environment(nodes)
63
64         logger.console("\n(re)Starting Honeycomb service ...")
65
66         cmd = "sudo service honeycomb start"
67
68         for node in nodes:
69             if node['type'] == NodeType.DUT:
70                 ssh = SSH()
71                 ssh.connect(node)
72                 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
73                 if int(ret_code) != 0:
74                     raise HoneycombError('Node {0} failed to start Honeycomb.'.
75                                          format(node['host']))
76                 else:
77                     logger.info("Starting the Honeycomb service on node {0} is "
78                                 "in progress ...".format(node['host']))
79
80     @staticmethod
81     def stop_honeycomb_on_duts(*nodes):
82         """Stop the Honeycomb service on specified DUT nodes.
83
84         This keyword stops the Honeycomb service on specified nodes. It just
85         stops the Honeycomb and does not check its shutdown state. Use the
86         keyword "Check Honeycomb Shutdown State" to check if Honeycomb has
87         stopped.
88         :param nodes: List of nodes to stop Honeycomb on.
89         :type nodes: list
90         :raises HoneycombError: If Honeycomb failed to stop.
91         """
92         logger.console("\nShutting down Honeycomb service ...")
93
94         cmd = "sudo service honeycomb stop"
95         errors = []
96
97         for node in nodes:
98             if node['type'] == NodeType.DUT:
99                 ssh = SSH()
100                 ssh.connect(node)
101                 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
102                 if int(ret_code) != 0:
103                     errors.append(node['host'])
104                 else:
105                     logger.info("Stopping the Honeycomb service on node {0} is "
106                                 "in progress ...".format(node['host']))
107         if errors:
108             raise HoneycombError('Node(s) {0} failed to stop Honeycomb.'.
109                                  format(errors))
110
111     @staticmethod
112     def restart_honeycomb_on_dut(node):
113         """Restart Honeycomb on specified DUT nodes.
114
115         This keyword restarts the Honeycomb service on specified DUTs. Use the
116         keyword "Check Honeycomb Startup State" to check if the Honeycomb is up
117         and running.
118
119         :param node: Node to restart Honeycomb on.
120         :type node: dict
121         :raises HoneycombError: If Honeycomb fails to start.
122         """
123
124         logger.console("\n(re)Starting Honeycomb service ...")
125
126         cmd = "sudo service honeycomb restart"
127
128         ssh = SSH()
129         ssh.connect(node)
130         (ret_code, _, _) = ssh.exec_command_sudo(cmd)
131         if int(ret_code) != 0:
132             raise HoneycombError('Node {0} failed to restart Honeycomb.'.
133                                  format(node['host']))
134         else:
135             logger.info(
136                 "Honeycomb service restart is in progress on node {0}".format(
137                     node['host']))
138
139     @staticmethod
140     def check_honeycomb_startup_state(node, timeout=360, retries=20,
141                                       interval=15):
142         """Repeatedly check the status of Honeycomb startup until it is fully
143         started or until timeout or max retries is reached.
144
145         :param node: Honeycomb node.
146         :param timeout: Timeout value in seconds.
147         :param retries: Max number of retries.
148         :param interval: Interval between checks, in seconds.
149         :type node: dict
150         :type timeout: int
151         :type retries: int
152         :type interval: int
153         :raises HoneycombError: If the Honeycomb process IP cannot be found,
154         or if timeout or number of retries is exceeded."""
155
156         ssh = SSH()
157         ssh.connect(node)
158
159         count = 0
160         start = time()
161         while time() - start < timeout and count < retries:
162             count += 1
163
164             try:
165                 status_code_version, _ = HcUtil.get_honeycomb_data(
166                     node, "oper_vpp_version")
167                 status_code_if_cfg, _ = HcUtil.get_honeycomb_data(
168                     node, "config_vpp_interfaces")
169                 status_code_if_oper, _ = HcUtil.get_honeycomb_data(
170                     node, "oper_vpp_interfaces")
171             except HTTPRequestError:
172                 sleep(interval)
173                 continue
174             if status_code_if_cfg == HTTPCodes.OK\
175                     and status_code_if_cfg == HTTPCodes.OK\
176                     and status_code_if_oper == HTTPCodes.OK:
177                 logger.info("Check successful, Honeycomb is up and running.")
178                 break
179             else:
180                 logger.debug(
181                     "Attempt ${count} failed on Restconf check. Status codes:\n"
182                     "Version: {version}\n"
183                     "Interface config: {if_cfg}\n"
184                     "Interface operational: {if_oper}".format(
185                         count=count,
186                         version=status_code_version,
187                         if_cfg=status_code_if_cfg,
188                         if_oper=status_code_if_oper))
189                 sleep(interval)
190                 continue
191         else:
192             _, vpp_status, _ = ssh.exec_command("sudo service vpp status")
193             raise HoneycombError(
194                 "Timeout or max retries exceeded. Status of VPP:\n"
195                 "{vpp_status}".format(vpp_status=vpp_status))
196
197     @staticmethod
198     def check_honeycomb_shutdown_state(node):
199         """Check state of Honeycomb service during shutdown on specified nodes.
200
201         Honeycomb nodes reply with connection refused or the following status
202         codes depending on shutdown progress: codes 200, 404.
203
204         :param node: List of DUT nodes stopping Honeycomb.
205         :type node: dict
206         :return: True if all GETs fail to connect.
207         :rtype bool
208         """
209         cmd = "pgrep honeycomb"
210
211         ssh = SSH()
212         ssh.connect(node)
213         (ret_code, _, _) = ssh.exec_command_sudo(cmd)
214         if ret_code == 0:
215             raise HoneycombError('Honeycomb on node {0} is still '
216                                  'running.'.format(node['host']),
217                                  enable_logging=False)
218         else:
219             logger.info("Honeycomb on node {0} has stopped".
220                         format(node['host']))
221         return True
222
223     @staticmethod
224     def configure_restconf_binding_address(node):
225         """Configure Honeycomb to accept restconf requests from all IP
226         addresses. IP version is determined by node data.
227
228          :param node: Information about a DUT node.
229          :type node: dict
230          :raises HoneycombError: If the configuration could not be changed.
231          """
232
233         find = "restconf-binding-address"
234         try:
235             IPv6Address(unicode(node["host"]))
236             # if management IP of the node is in IPv6 format
237             replace = '\\"restconf-binding-address\\": \\"0::0\\",'
238         except (AttributeError, AddressValueError):
239             replace = '\\"restconf-binding-address\\": \\"0.0.0.0\\",'
240
241         argument = '"/{0}/c\\ {1}"'.format(find, replace)
242         path = "{0}/config/restconf.json".format(Const.REMOTE_HC_DIR)
243         command = "sed -i {0} {1}".format(argument, path)
244
245         ssh = SSH()
246         ssh.connect(node)
247         (ret_code, _, stderr) = ssh.exec_command_sudo(command)
248         if ret_code != 0:
249             raise HoneycombError("Failed to modify configuration on "
250                                  "node {0}, {1}".format(node, stderr))
251
252     @staticmethod
253     def configure_jvpp_timeout(node, timeout=10):
254         """Configure timeout value for Java API commands Honeycomb sends to VPP.
255
256          :param node: Information about a DUT node.
257          :param timeout: Timeout value in seconds.
258          :type node: dict
259          :type timeout: int
260          :raises HoneycombError: If the configuration could not be changed.
261          """
262
263         find = "jvpp-request-timeout"
264         replace = '\\"jvpp-request-timeout\\": {0}'.format(timeout)
265
266         argument = '"/{0}/c\\ {1}"'.format(find, replace)
267         path = "{0}/config/jvpp.json".format(Const.REMOTE_HC_DIR)
268         command = "sed -i {0} {1}".format(argument, path)
269
270         ssh = SSH()
271         ssh.connect(node)
272         (ret_code, _, stderr) = ssh.exec_command_sudo(command)
273         if ret_code != 0:
274             raise HoneycombError("Failed to modify configuration on "
275                                  "node {0}, {1}".format(node, stderr))
276
277     @staticmethod
278     def print_environment(nodes):
279         """Print information about the nodes to log. The information is defined
280         by commands in cmds tuple at the beginning of this method.
281
282         :param nodes: List of DUT nodes to get information about.
283         :type nodes: list
284         """
285
286         # TODO: When everything is set and running in VIRL env, transform this
287         # method to a keyword checking the environment.
288
289         cmds = ("uname -a",
290                 "df -lh",
291                 "echo $JAVA_HOME",
292                 "echo $PATH",
293                 "which java",
294                 "java -version",
295                 "dpkg --list | grep openjdk",
296                 "ls -la /opt/honeycomb",
297                 "cat /opt/honeycomb/modules/*module-config")
298
299         for node in nodes:
300             if node['type'] == NodeType.DUT:
301                 logger.info("Checking node {} ...".format(node['host']))
302                 for cmd in cmds:
303                     logger.info("Command: {}".format(cmd))
304                     ssh = SSH()
305                     ssh.connect(node)
306                     ssh.exec_command_sudo(cmd)
307
308     @staticmethod
309     def print_ports(node):
310         """Uses "sudo netstat -anp | grep java" to print port where a java
311         application listens.
312
313         :param node: Honeycomb node where we want to print the ports.
314         :type node: dict
315         """
316
317         cmds = ("netstat -anp | grep java",
318                 "ps -ef | grep [h]oneycomb")
319
320         logger.info("Checking node {} ...".format(node['host']))
321         for cmd in cmds:
322             logger.info("Command: {}".format(cmd))
323             ssh = SSH()
324             ssh.connect(node)
325             ssh.exec_command_sudo(cmd)
326
327     @staticmethod
328     def configure_log_level(node, level):
329         """Set Honeycomb logging to the specified level.
330
331         :param node: Honeycomb node.
332         :param level: Log level (INFO, DEBUG, TRACE).
333         :type node: dict
334         :type level: str
335         """
336
337         find = 'logger name=\\"io.fd\\"'
338         replace = '<logger name=\\"io.fd\\" level=\\"{0}\\"/>'.format(level)
339
340         argument = '"/{0}/c\\ {1}"'.format(find, replace)
341         path = "{0}/config/logback.xml".format(Const.REMOTE_HC_DIR)
342         command = "sed -i {0} {1}".format(argument, path)
343
344         ssh = SSH()
345         ssh.connect(node)
346         (ret_code, _, stderr) = ssh.exec_command_sudo(command)
347         if ret_code != 0:
348             raise HoneycombError("Failed to modify configuration on "
349                                  "node {0}, {1}".format(node, stderr))
350
351     @staticmethod
352     def manage_honeycomb_features(node, feature, disable=False):
353         """Configure Honeycomb to use features that are disabled by default, or
354         disable previously enabled features.
355
356         ..Note:: If the module is not enabled in VPP, Honeycomb will
357         be unable to establish VPP connection.
358
359         :param node: Honeycomb node.
360         :param feature: Feature to enable.
361         :param disable: Disable the specified feature instead of enabling it.
362         :type node: dict
363         :type feature: string
364         :type disable: bool
365         :raises HoneycombError: If the configuration could not be changed.
366          """
367
368         disabled_features = {
369             "NSH": ["io.fd.hc2vpp.vppnsh.impl.VppNshModule"],
370             "BGP": ["io.fd.hc2vpp.bgp.inet.BgpInetModule",
371                     "io.fd.honeycomb.infra.bgp.BgpModule",
372                     "io.fd.honeycomb.infra.bgp.BgpReadersModule",
373                     "io.fd.honeycomb.infra.bgp.BgpWritersModule",
374                     "io.fd.honeycomb.northbound.bgp.extension.InetModule",
375                     "io.fd.honeycomb.northbound.bgp.extension.EvpnModule",
376                     "io.fd.honeycomb.northbound.bgp.extension.L3VpnV4Module",
377                     "io.fd.honeycomb.northbound.bgp.extension.L3VpnV6Module",
378                     "io.fd.honeycomb.northbound.bgp.extension."
379                     "LabeledUnicastModule",
380                     "io.fd.honeycomb.northbound.bgp.extension.LinkstateModule"]
381         }
382
383         ssh = SSH()
384         ssh.connect(node)
385
386         if feature in disabled_features.keys():
387             # for every module, uncomment by replacing the entire line
388             for item in disabled_features[feature]:
389                 find = replace = "{0}".format(item)
390                 if disable:
391                     replace = "// {0}".format(find)
392
393                 argument = '"/{0}/c\\ {1}"'.format(find, replace)
394                 path = "{0}/modules/*module-config"\
395                     .format(Const.REMOTE_HC_DIR)
396                 command = "sed -i {0} {1}".format(argument, path)
397
398                 (ret_code, _, stderr) = ssh.exec_command_sudo(command)
399                 if ret_code != 0:
400                     raise HoneycombError("Failed to modify configuration on "
401                                          "node {0}, {1}".format(node, stderr))
402         else:
403             raise HoneycombError(
404                 "Unrecognized feature {0}.".format(feature))
405
406     @staticmethod
407     def copy_java_libraries(node):
408         """Copy Java libraries installed by vpp-api-java package to honeycomb
409         lib folder.
410
411         This is a (temporary?) workaround for jvpp version mismatches.
412
413         :param node: Honeycomb node
414         :type node: dict
415         """
416
417         ssh = SSH()
418         ssh.connect(node)
419         (_, stdout, _) = ssh.exec_command_sudo(
420             "ls /usr/share/java | grep ^jvpp-*")
421
422         files = stdout.split("\n")[:-1]
423         for item in files:
424             # example filenames:
425             # jvpp-registry-17.04.jar
426             # jvpp-core-17.04.jar
427
428             parts = item.split("-")
429             version = "{0}-SNAPSHOT".format(parts[2][:5])
430             artifact_id = "{0}-{1}".format(parts[0], parts[1])
431
432             directory = "{0}/lib/io/fd/vpp/{1}/{2}".format(
433                 Const.REMOTE_HC_DIR, artifact_id, version)
434             cmd = "sudo mkdir -p {0}; " \
435                   "sudo cp /usr/share/java/{1} {0}/{2}-{3}.jar".format(
436                       directory, item, artifact_id, version)
437
438             (ret_code, _, stderr) = ssh.exec_command(cmd)
439             if ret_code != 0:
440                 raise HoneycombError("Failed to copy JVPP libraries on "
441                                      "node {0}, {1}".format(node, stderr))
442
443     @staticmethod
444     def copy_odl_client(node, odl_name, src_path, dst_path):
445         """Copy ODL Client from source path to destination path.
446
447         :param node: Honeycomb node.
448         :param odl_name: Name of ODL client version to use.
449         :param src_path: Source Path where to find ODl client.
450         :param dst_path: Destination path.
451         :type node: dict
452         :type odl_name: str
453         :type src_path: str
454         :type dst_path: str
455         :raises HoneycombError: If the operation fails.
456         """
457
458         ssh = SSH()
459         ssh.connect(node)
460
461         cmd = "sudo rm -rf {dst}/*karaf_{odl_name} && " \
462               "cp -r {src}/*karaf_{odl_name}* {dst}".format(
463                   src=src_path, odl_name=odl_name, dst=dst_path)
464
465         ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=180)
466         if int(ret_code) != 0:
467             raise HoneycombError(
468                 "Failed to copy ODL client on node {0}".format(node["host"]))
469
470     @staticmethod
471     def setup_odl_client(node, path):
472         """Start ODL client on the specified node.
473
474         Karaf should be located in the provided path, and VPP and Honeycomb
475         should already be running, otherwise the start will fail.
476         :param node: Node to start ODL client on.
477         :param path: Path to ODL client on node.
478         :type node: dict
479         :type path: str
480         :raises HoneycombError: If Honeycomb fails to start.
481         """
482
483         logger.console("\nStarting ODL client ...")
484         ssh = SSH()
485         ssh.connect(node)
486
487         cmd = "{path}/*karaf*/bin/start clean".format(path=path)
488         ret_code, _, _ = ssh.exec_command_sudo(cmd)
489
490         if int(ret_code) != 0:
491             raise HoneycombError('Node {0} failed to start ODL.'.
492                                  format(node['host']))
493         else:
494             logger.info("Starting the ODL client on node {0} is "
495                         "in progress ...".format(node['host']))
496
497     @staticmethod
498     def install_odl_features(node, path, *features):
499         """Install required features on a running ODL client.
500
501         :param node: Honeycomb node.
502         :param path: Path to ODL client on node.
503         :param features: Optional, list of additional features to install.
504         :type node: dict
505         :type path: str
506         :type features: list
507         """
508
509         ssh = SSH()
510         ssh.connect(node)
511
512         cmd = "{path}/*karaf*/bin/client -u karaf feature:install " \
513               "odl-restconf-all " \
514               "odl-netconf-connector-all " \
515               "odl-netconf-topology".format(path=path)
516         for feature in features:
517             cmd += " {0}".format(feature)
518
519         ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=250)
520
521         if int(ret_code) != 0:
522             raise HoneycombError("Feature install did not succeed.")
523
524     @staticmethod
525     def check_odl_startup_state(node):
526         """Check the status of ODL client startup.
527
528         :param node: Honeycomb node.
529         :param node: dict
530         :returns: True when ODL is started.
531         :rtype: bool
532         :raises HoneycombError: When the response is not code 200: OK.
533         """
534
535         path = HcUtil.read_path_from_url_file(
536             "odl_client/odl_netconf_connector")
537         expected_status_codes = (HTTPCodes.UNAUTHORIZED,
538                                  HTTPCodes.FORBIDDEN,
539                                  HTTPCodes.NOT_FOUND,
540                                  HTTPCodes.SERVICE_UNAVAILABLE,
541                                  HTTPCodes.INTERNAL_SERVER_ERROR)
542
543         status_code, _ = HTTPRequest.get(node, path, timeout=10,
544                                          enable_logging=False)
545         if status_code == HTTPCodes.OK:
546             logger.info("ODL client on node {0} is up and running".
547                         format(node['host']))
548         elif status_code in expected_status_codes:
549             if status_code == HTTPCodes.UNAUTHORIZED:
550                 logger.info('Unauthorized. If this triggers keyword '
551                             'timeout, verify username and password.')
552             raise HoneycombError('ODL client on node {0} running but '
553                                  'not yet ready.'.format(node['host']),
554                                  enable_logging=False)
555         else:
556             raise HoneycombError('Unexpected return code: {0}.'.
557                                  format(status_code))
558         return True
559
560     @staticmethod
561     def check_odl_shutdown_state(node):
562         """Check the status of ODL client shutdown.
563
564         :param node: Honeycomb node.
565         :type node: dict
566         :returns: True when ODL is stopped.
567         :rtype: bool
568         :raises HoneycombError: When the response is not code 200: OK.
569         """
570
571         cmd = "pgrep -f karaf"
572         path = HcUtil.read_path_from_url_file(
573             "odl_client/odl_netconf_connector")
574
575         try:
576             HTTPRequest.get(node, path, timeout=10, enable_logging=False)
577             raise HoneycombError("ODL client is still running.")
578         except HTTPRequestError:
579             logger.debug("Connection refused, checking process state....")
580             ssh = SSH()
581             ssh.connect(node)
582             ret_code, _, _ = ssh.exec_command(cmd)
583             if ret_code == 0:
584                 raise HoneycombError("ODL client is still running.")
585
586         return True
587
588     @staticmethod
589     def mount_honeycomb_on_odl(node):
590         """Tell ODL client to mount Honeycomb instance over netconf.
591
592         :param node: Honeycomb node.
593         :type node: dict
594         :raises HoneycombError: When the response is not code 200: OK.
595         """
596
597         path = HcUtil.read_path_from_url_file(
598             "odl_client/odl_netconf_connector")
599
600         url_file = "{0}/{1}".format(Const.RESOURCES_TPL_HC,
601                                     "odl_client/mount_honeycomb.json")
602
603         with open(url_file) as template:
604             data = template.read()
605
606         data = loads(data)
607
608         status_code, _ = HTTPRequest.post(
609             node,
610             path,
611             headers={"Content-Type": "application/json",
612                      "Accept": "text/plain"},
613             json=data,
614             timeout=10,
615             enable_logging=False)
616
617         if status_code == HTTPCodes.OK:
618             logger.info("ODL mount point configured successfully.")
619         elif status_code == HTTPCodes.CONFLICT:
620             logger.info("ODL mount point was already configured.")
621         else:
622             raise HoneycombError('Mount point configuration not successful')
623
624     @staticmethod
625     def stop_odl_client(node, path):
626         """Stop ODL client service on the specified node.
627
628         :param node: Node to start ODL client on.
629         :param path: Path to ODL client.
630         :type node: dict
631         :type path: str
632         :raises HoneycombError: If ODL client fails to stop.
633         """
634
635         ssh = SSH()
636         ssh.connect(node)
637
638         cmd = "{0}/*karaf*/bin/stop".format(path)
639
640         ssh = SSH()
641         ssh.connect(node)
642         ret_code, _, _ = ssh.exec_command_sudo(cmd)
643         if int(ret_code) != 0:
644             logger.debug("ODL Client refused to shut down.")
645             cmd = "pkill -f 'karaf'"
646             (ret_code, _, _) = ssh.exec_command_sudo(cmd)
647             if int(ret_code) != 0:
648                 raise HoneycombError('Node {0} failed to stop ODL.'.
649                                      format(node['host']))
650
651         logger.info("ODL client service stopped.")
652
653
654
class HoneycombStartupConfig(object):
    """Generator for Honeycomb startup configuration.
    """
    def __init__(self):
        """Initializer."""

        # Shell wrapper template; Honeycomb exit status 100 requests restart.
        self.template = """#!/bin/sh -
        STATUS=100

        while [ $STATUS -eq 100 ]
        do
          {java_call} -jar $(dirname $0)/{jar_filename}
          STATUS=$?
          echo "Honeycomb exited with status: $STATUS"
          if [ $STATUS -eq 100 ]
          then
            echo "Restarting..."
          fi
        done
        """

        # Template for the java invocation inside the wrapper script.
        self.java_call = "{scheduler} {affinity} java{jit_mode}{params}"

        self.scheduler = ""      # e.g. "chrt -f 99", set by set_cpu_scheduler
        self.core_affinity = ""  # e.g. "taskset -c 0-1"
        self.jit_mode = ""       # e.g. " -server"
        self.params = ""         # accumulated JVM parameters
        self.numa = ""           # NOTE(review): appears unused in this class

        self.config = ""         # rendered script, filled by apply_config
        self.ssh = SSH()
    def apply_config(self, node):
        """Generate configuration file /opt/honeycomb/honeycomb on the specified
         node.

         :param node: Honeycomb node.
         :type node: dict
         """

        self.ssh.connect(node)
        # Locate the Honeycomb jar file name. NOTE(review): the command
        # output presumably carries a trailing newline that ends up inside
        # the generated script — confirm the shell template tolerates it.
        _, filename, _ = self.ssh.exec_command("ls /opt/honeycomb | grep .jar")

        java_call = self.java_call.format(scheduler=self.scheduler,
                                          affinity=self.core_affinity,
                                          jit_mode=self.jit_mode,
                                          params=self.params)
        self.config = self.template.format(java_call=java_call,
                                           jar_filename=filename)

        # Write the script to /tmp, mark it executable, then move it into
        # place with sudo (writing directly to /opt needs elevated rights).
        self.ssh.connect(node)
        cmd = "echo '{config}' > /tmp/honeycomb " \
              "&& chmod +x /tmp/honeycomb " \
              "&& sudo mv -f /tmp/honeycomb /opt/honeycomb".\
            format(config=self.config)
        self.ssh.exec_command(cmd)
712     def set_cpu_scheduler(self, scheduler="FIFO"):
713         """Use alternate CPU scheduler.
714
715         Note: OTHER scheduler doesn't load-balance over isolcpus.
716
717         :param scheduler: CPU scheduler to use.
718         :type scheduler: str
719         """
720
721         schedulers = {"FIFO": "-f 99",  # First In, First Out
722                       "RR": "-r 99",  # Round Robin
723                       "OTHER": "-o",  # Ubuntu default
724                      }
725         self.scheduler = "chrt {0}".format(schedulers[scheduler])
726
727     def set_cpu_core_affinity(self, low, high=None):
728         """Set core affinity for the honeycomb process and subprocesses.
729
730         :param low: Lowest core ID number.
731         :param high: Highest core ID number. Leave empty to use a single core.
732         :type low: int
733         :type high: int
734         """
735
736         self.core_affinity = "taskset -c {low}-{high}".format(
737             low=low, high=high if high else low)
738
739     def set_jit_compiler_mode(self, jit_mode):
740         """Set running mode for Java's JIT compiler.
741
742         :param jit_mode: Desiret JIT mode.
743         :type jit_mode: str
744         """
745
746         modes = {"client": " -client",  # Default
747                  "server": " -server",  # Higher performance but longer warmup
748                  "classic": " -classic"  # Disables JIT compiler
749                 }
750
751         self.jit_mode = modes[jit_mode]
752
753     def set_memory_size(self, mem_min, mem_max=None):
754         """Set minimum and maximum memory use for the JVM.
755
756         :param mem_min: Minimum amount of memory (MB).
757         :param mem_max: Maximum amount of memory (MB). Default is 4 times
758         minimum value.
759         :type mem_min: int
760         :type mem_max: int
761         """
762
763         self.params += " -Xms{min}m -Xmx{max}m".format(
764             min=mem_min, max=mem_max if mem_max else mem_min*4)
765
766     def set_metaspace_size(self, mem_min, mem_max=None):
767         """Set minimum and maximum memory used for class metadata in the JVM.
768
769         :param mem_min: Minimum metaspace size (MB).
770         :param mem_max: Maximum metaspace size (MB). Defailt is 4 times
771         minimum value.
772         :type mem_min: int
773         :type mem_max: int
774         """
775
776         self.params += " -XX:MetaspaceSize={min}m " \
777                        "-XX:MaxMetaspaceSize={max}m".format(
778                            min=mem_min, max=mem_max if mem_max else mem_min*4)
779
780     def set_numa_optimization(self):
781         """Use optimization of memory use and garbage collection for NUMA
782         architectures."""
783
784         self.params += " -XX:+UseNUMA -XX:+UseParallelGC"
785
786     def set_ssh_security_provider(self):
787         """Disables BouncyCastle for SSHD."""
788         # Workaround for issue described in:
789         # https://wiki.fd.io/view/Honeycomb/Releases/1609/Honeycomb_and_ODL
790
791         self.params += " -Dorg.apache.sshd.registerBouncyCastle=false"