CSIT-811 HC Test: BGP tests - IPv4 CRUD
[csit.git] / resources / libraries / python / honeycomb / HoneycombSetup.py
1 # Copyright (c) 2017 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
5 #
6 #     http://www.apache.org/licenses/LICENSE-2.0
7 #
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
13
14 """Implementation of keywords for Honeycomb setup."""
15
16 from json import loads
17 from time import time, sleep
18
19 from ipaddress import IPv6Address, AddressValueError
20
21 from robot.api import logger
22
23 from resources.libraries.python.HTTPRequest import HTTPRequest, HTTPCodes, \
24     HTTPRequestError
25 from resources.libraries.python.constants import Constants as Const
26 from resources.libraries.python.honeycomb.HoneycombUtil import HoneycombError
27 from resources.libraries.python.honeycomb.HoneycombUtil \
28     import HoneycombUtil as HcUtil
29 from resources.libraries.python.ssh import SSH
30 from resources.libraries.python.topology import NodeType
31
32
class HoneycombSetup(object):
    """Implements keywords for Honeycomb setup.

    The keywords implemented in this class make possible to:
    - start Honeycomb,
    - stop Honeycomb,
    - check the Honeycomb start-up state,
    - check the Honeycomb shutdown state,
    - add VPP to the topology.
    """

    def __init__(self):
        pass

    @staticmethod
    def start_honeycomb_on_duts(*nodes):
        """Start Honeycomb on specified DUT nodes.

        This keyword starts the Honeycomb service on specified DUTs.
        The keyword just starts the Honeycomb and does not check its startup
        state. Use the keyword "Check Honeycomb Startup State" to check if the
        Honeycomb is up and running.
        Honeycomb must be installed in "/opt" directory, otherwise the start
        will fail.

        :param nodes: List of nodes to start Honeycomb on.
        :type nodes: list
        :raises HoneycombError: If Honeycomb fails to start.
        """

        HoneycombSetup.print_environment(nodes)

        logger.console("\n(re)Starting Honeycomb service ...")

        cmd = "sudo service honeycomb start"

        for node in nodes:
            if node['type'] == NodeType.DUT:
                ssh = SSH()
                ssh.connect(node)
                (ret_code, _, _) = ssh.exec_command_sudo(cmd)
                if int(ret_code) != 0:
                    raise HoneycombError('Node {0} failed to start Honeycomb.'.
                                         format(node['host']))
                else:
                    logger.info("Starting the Honeycomb service on node {0} is "
                                "in progress ...".format(node['host']))

    @staticmethod
    def stop_honeycomb_on_duts(*nodes):
        """Stop the Honeycomb service on specified DUT nodes.

        This keyword stops the Honeycomb service on specified nodes. It just
        stops the Honeycomb and does not check its shutdown state. Use the
        keyword "Check Honeycomb Shutdown State" to check if Honeycomb has
        stopped.

        :param nodes: List of nodes to stop Honeycomb on.
        :type nodes: list
        :raises HoneycombError: If Honeycomb failed to stop.
        """
        logger.console("\nShutting down Honeycomb service ...")

        cmd = "sudo service honeycomb stop"
        # Collect failures so that all nodes are attempted before raising.
        errors = []

        for node in nodes:
            if node['type'] == NodeType.DUT:
                ssh = SSH()
                ssh.connect(node)
                (ret_code, _, _) = ssh.exec_command_sudo(cmd)
                if int(ret_code) != 0:
                    errors.append(node['host'])
                else:
                    logger.info("Stopping the Honeycomb service on node {0} is "
                                "in progress ...".format(node['host']))
        if errors:
            raise HoneycombError('Node(s) {0} failed to stop Honeycomb.'.
                                 format(errors))

    @staticmethod
    def restart_honeycomb_on_dut(node):
        """Restart Honeycomb on specified DUT nodes.

        This keyword restarts the Honeycomb service on specified DUTs. Use the
        keyword "Check Honeycomb Startup State" to check if the Honeycomb is up
        and running.

        :param node: Node to restart Honeycomb on.
        :type node: dict
        :raises HoneycombError: If Honeycomb fails to start.
        """

        logger.console("\n(re)Starting Honeycomb service ...")

        cmd = "sudo service honeycomb restart"

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, _) = ssh.exec_command_sudo(cmd)
        if int(ret_code) != 0:
            raise HoneycombError('Node {0} failed to restart Honeycomb.'.
                                 format(node['host']))
        else:
            logger.info(
                "Honeycomb service restart is in progress on node {0}".format(
                    node['host']))

    @staticmethod
    def check_honeycomb_startup_state(node, timeout=360, retries=20,
                                      interval=15):
        """Repeatedly check the status of Honeycomb startup until it is fully
        started or until timeout or max retries is reached.

        Startup is considered complete when three Restconf endpoints (VPP
        version, interface configuration, interface operational data) all
        return HTTP 200.

        :param node: Honeycomb node.
        :param timeout: Timeout value in seconds.
        :param retries: Max number of retries.
        :param interval: Interval between checks, in seconds.
        :type node: dict
        :type timeout: int
        :type retries: int
        :type interval: int
        :raises HoneycombError: If the Honeycomb process IP cannot be found,
        or if timeout or number of retries is exceeded.
        """

        ssh = SSH()
        ssh.connect(node)

        count = 0
        start = time()
        while time() - start < timeout and count < retries:
            count += 1

            try:
                status_code_version, _ = HcUtil.get_honeycomb_data(
                    node, "oper_vpp_version")
                status_code_if_cfg, _ = HcUtil.get_honeycomb_data(
                    node, "config_vpp_interfaces")
                status_code_if_oper, _ = HcUtil.get_honeycomb_data(
                    node, "oper_vpp_interfaces")
            except HTTPRequestError:
                # Connection refused while Honeycomb is still starting up.
                sleep(interval)
                continue
            # Fixed: original tested status_code_if_cfg twice and never
            # checked the version endpoint.
            if status_code_version == HTTPCodes.OK\
                    and status_code_if_cfg == HTTPCodes.OK\
                    and status_code_if_oper == HTTPCodes.OK:
                logger.info("Check successful, Honeycomb is up and running.")
                break
            else:
                # Fixed: stray "$" before the {count} placeholder.
                logger.debug(
                    "Attempt {count} failed on Restconf check. Status codes:\n"
                    "Version: {version}\n"
                    "Interface config: {if_cfg}\n"
                    "Interface operational: {if_oper}".format(
                        count=count,
                        version=status_code_version,
                        if_cfg=status_code_if_cfg,
                        if_oper=status_code_if_oper))
                sleep(interval)
                continue
        else:
            # The while loop exhausted without hitting "break": collect VPP
            # status for the error message.
            _, vpp_status, _ = ssh.exec_command("sudo service vpp status")
            raise HoneycombError(
                "Timeout or max retries exceeded. Status of VPP:\n"
                "{vpp_status}".format(vpp_status=vpp_status))

    @staticmethod
    def check_honeycomb_shutdown_state(node):
        """Check state of Honeycomb service during shutdown on specified node.

        The check is process-based: if "pgrep honeycomb" still finds a
        process, Honeycomb has not stopped yet.

        :param node: DUT node stopping Honeycomb.
        :type node: dict
        :return: True if the Honeycomb process is no longer running.
        :rtype: bool
        :raises HoneycombError: If the Honeycomb process is still running.
        """
        cmd = "pgrep honeycomb"

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, _) = ssh.exec_command_sudo(cmd)
        if ret_code == 0:
            raise HoneycombError('Honeycomb on node {0} is still '
                                 'running.'.format(node['host']),
                                 enable_logging=False)
        else:
            logger.info("Honeycomb on node {0} has stopped".
                        format(node['host']))
        return True

    @staticmethod
    def configure_restconf_binding_address(node):
        """Configure Honeycomb to accept restconf requests from all IP
        addresses. IP version is determined by node data.

        :param node: Information about a DUT node.
        :type node: dict
        :raises HoneycombError: If the configuration could not be changed.
        """

        find = "restconf-binding-address"
        try:
            IPv6Address(unicode(node["host"]))
            # if management IP of the node is in IPv6 format
            replace = '\\"restconf-binding-address\\": \\"0::0\\",'
        except (AttributeError, AddressValueError):
            replace = '\\"restconf-binding-address\\": \\"0.0.0.0\\",'

        # sed "c\" command: replace the whole matching line.
        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/restconf.json".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        if ret_code != 0:
            raise HoneycombError("Failed to modify configuration on "
                                 "node {0}, {1}".format(node, stderr))

    @staticmethod
    def configure_jvpp_timeout(node, timeout=10):
        """Configure timeout value for Java API commands Honeycomb sends to VPP.

        :param node: Information about a DUT node.
        :param timeout: Timeout value in seconds.
        :type node: dict
        :type timeout: int
        :raises HoneycombError: If the configuration could not be changed.
        """

        find = "jvpp-request-timeout"
        replace = '\\"jvpp-request-timeout\\": {0}'.format(timeout)

        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/jvpp.json".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        if ret_code != 0:
            raise HoneycombError("Failed to modify configuration on "
                                 "node {0}, {1}".format(node, stderr))

    @staticmethod
    def print_environment(nodes):
        """Print information about the nodes to log. The information is defined
        by commands in cmds tuple at the beginning of this method.

        :param nodes: List of DUT nodes to get information about.
        :type nodes: list
        """

        # TODO: When everything is set and running in VIRL env, transform this
        # method to a keyword checking the environment.

        cmds = ("uname -a",
                "df -lh",
                "echo $JAVA_HOME",
                "echo $PATH",
                "which java",
                "java -version",
                "dpkg --list | grep openjdk",
                "ls -la /opt/honeycomb",
                "cat /opt/honeycomb/modules/*module-config")

        for node in nodes:
            if node['type'] == NodeType.DUT:
                logger.info("Checking node {} ...".format(node['host']))
                # Reuse one SSH connection per node instead of reconnecting
                # for every command.
                ssh = SSH()
                ssh.connect(node)
                for cmd in cmds:
                    logger.info("Command: {}".format(cmd))
                    ssh.exec_command_sudo(cmd)

    @staticmethod
    def print_ports(node):
        """Uses "sudo netstat -anp | grep java" to print port where a java
        application listens.

        :param node: Honeycomb node where we want to print the ports.
        :type node: dict
        """

        cmds = ("netstat -anp | grep java",
                "ps -ef | grep [h]oneycomb")

        logger.info("Checking node {} ...".format(node['host']))
        # Reuse one SSH connection for all commands.
        ssh = SSH()
        ssh.connect(node)
        for cmd in cmds:
            logger.info("Command: {}".format(cmd))
            ssh.exec_command_sudo(cmd)

    @staticmethod
    def configure_log_level(node, level):
        """Set Honeycomb logging to the specified level.

        :param node: Honeycomb node.
        :param level: Log level (INFO, DEBUG, TRACE).
        :type node: dict
        :type level: str
        :raises HoneycombError: If the configuration could not be changed.
        """

        find = 'logger name=\\"io.fd\\"'
        replace = '<logger name=\\"io.fd\\" level=\\"{0}\\"/>'.format(level)

        argument = '"/{0}/c\\ {1}"'.format(find, replace)
        path = "{0}/config/logback.xml".format(Const.REMOTE_HC_DIR)
        command = "sed -i {0} {1}".format(argument, path)

        ssh = SSH()
        ssh.connect(node)
        (ret_code, _, stderr) = ssh.exec_command_sudo(command)
        if ret_code != 0:
            raise HoneycombError("Failed to modify configuration on "
                                 "node {0}, {1}".format(node, stderr))

    @staticmethod
    def manage_honeycomb_features(node, feature, disable=False):
        """Configure Honeycomb to use features that are disabled by default, or
        disable previously enabled features.

        ..Note:: If the module is not enabled in VPP, Honeycomb will
        be unable to establish VPP connection.

        :param node: Honeycomb node.
        :param feature: Feature to enable.
        :param disable: Disable the specified feature instead of enabling it.
        :type node: dict
        :type feature: string
        :type disable: bool
        :raises HoneycombError: If the configuration could not be changed,
        or if the feature name is not recognized.
        """

        # Maps a feature name to the Honeycomb module-config entries that
        # must be (un)commented to toggle it.
        disabled_features = {
            "NSH": ["io.fd.hc2vpp.vppnsh.impl.VppNshModule"],
            "BGP": ["io.fd.hc2vpp.bgp.inet.BgpInetModule",
                    "io.fd.honeycomb.infra.bgp.BgpModule",
                    "io.fd.honeycomb.infra.bgp.BgpReadersModule",
                    "io.fd.honeycomb.infra.bgp.BgpWritersModule"]
        }

        ssh = SSH()
        ssh.connect(node)

        if feature in disabled_features.keys():
            # for every module, uncomment by replacing the entire line
            for item in disabled_features[feature]:
                find = replace = "{0}".format(item)
                if disable:
                    # "// " prefix comments the module out in module-config.
                    replace = "// {0}".format(find)

                argument = '"/{0}/c\\ {1}"'.format(find, replace)
                path = "{0}/modules/*module-config"\
                    .format(Const.REMOTE_HC_DIR)
                command = "sed -i {0} {1}".format(argument, path)

                (ret_code, _, stderr) = ssh.exec_command_sudo(command)
                if ret_code != 0:
                    raise HoneycombError("Failed to modify configuration on "
                                         "node {0}, {1}".format(node, stderr))
        else:
            raise HoneycombError(
                "Unrecognized feature {0}.".format(feature))

    @staticmethod
    def copy_java_libraries(node):
        """Copy Java libraries installed by vpp-api-java package to honeycomb
        lib folder.

        This is a (temporary?) workaround for jvpp version mismatches.

        :param node: Honeycomb node
        :type node: dict
        :raises HoneycombError: If the copy operation fails.
        """

        ssh = SSH()
        ssh.connect(node)
        (_, stdout, _) = ssh.exec_command_sudo(
            "ls /usr/share/java | grep ^jvpp-*")

        # Drop the trailing empty string produced by the final newline.
        files = stdout.split("\n")[:-1]
        for item in files:
            # example filenames:
            # jvpp-registry-17.04.jar
            # jvpp-core-17.04.jar

            parts = item.split("-")
            version = "{0}-SNAPSHOT".format(parts[2][:5])
            artifact_id = "{0}-{1}".format(parts[0], parts[1])

            directory = "{0}/lib/io/fd/vpp/{1}/{2}".format(
                Const.REMOTE_HC_DIR, artifact_id, version)
            cmd = "sudo mkdir -p {0}; " \
                  "sudo cp /usr/share/java/{1} {0}/{2}-{3}.jar".format(
                      directory, item, artifact_id, version)

            (ret_code, _, stderr) = ssh.exec_command(cmd)
            if ret_code != 0:
                raise HoneycombError("Failed to copy JVPP libraries on "
                                     "node {0}, {1}".format(node, stderr))

    @staticmethod
    def copy_odl_client(node, odl_name, src_path, dst_path):
        """Copy ODL Client from source path to destination path.

        :param node: Honeycomb node.
        :param odl_name: Name of ODL client version to use.
        :param src_path: Source Path where to find ODl client.
        :param dst_path: Destination path.
        :type node: dict
        :type odl_name: str
        :type src_path: str
        :type dst_path: str
        :raises HoneycombError: If the operation fails.
        """

        ssh = SSH()
        ssh.connect(node)

        cmd = "sudo rm -rf {dst}/*karaf_{odl_name} && " \
              "cp -r {src}/*karaf_{odl_name}* {dst}".format(
                  src=src_path, odl_name=odl_name, dst=dst_path)

        ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=180)
        if int(ret_code) != 0:
            raise HoneycombError(
                "Failed to copy ODL client on node {0}".format(node["host"]))

    @staticmethod
    def setup_odl_client(node, path):
        """Start ODL client on the specified node.

        Karaf should be located in the provided path, and VPP and Honeycomb
        should already be running, otherwise the start will fail.

        :param node: Node to start ODL client on.
        :param path: Path to ODL client on node.
        :type node: dict
        :type path: str
        :raises HoneycombError: If Honeycomb fails to start.
        """

        logger.console("\nStarting ODL client ...")
        ssh = SSH()
        ssh.connect(node)

        cmd = "{path}/*karaf*/bin/start clean".format(path=path)
        ret_code, _, _ = ssh.exec_command_sudo(cmd)

        if int(ret_code) != 0:
            raise HoneycombError('Node {0} failed to start ODL.'.
                                 format(node['host']))
        else:
            logger.info("Starting the ODL client on node {0} is "
                        "in progress ...".format(node['host']))

    @staticmethod
    def install_odl_features(node, path, *features):
        """Install required features on a running ODL client.

        :param node: Honeycomb node.
        :param path: Path to ODL client on node.
        :param features: Optional, list of additional features to install.
        :type node: dict
        :type path: str
        :type features: list
        :raises HoneycombError: If the feature installation fails.
        """

        ssh = SSH()
        ssh.connect(node)

        cmd = "{path}/*karaf*/bin/client -u karaf feature:install " \
              "odl-restconf-all " \
              "odl-netconf-connector-all " \
              "odl-netconf-topology".format(path=path)
        for feature in features:
            cmd += " {0}".format(feature)

        ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=250)

        if int(ret_code) != 0:
            raise HoneycombError("Feature install did not succeed.")

    @staticmethod
    def check_odl_startup_state(node):
        """Check the status of ODL client startup.

        :param node: Honeycomb node.
        :type node: dict
        :returns: True when ODL is started.
        :rtype: bool
        :raises HoneycombError: When the response is not code 200: OK.
        """

        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")
        # Codes ODL is known to return while still coming up; anything else
        # is unexpected.
        expected_status_codes = (HTTPCodes.UNAUTHORIZED,
                                 HTTPCodes.FORBIDDEN,
                                 HTTPCodes.NOT_FOUND,
                                 HTTPCodes.SERVICE_UNAVAILABLE,
                                 HTTPCodes.INTERNAL_SERVER_ERROR)

        status_code, _ = HTTPRequest.get(node, path, timeout=10,
                                         enable_logging=False)
        if status_code == HTTPCodes.OK:
            logger.info("ODL client on node {0} is up and running".
                        format(node['host']))
        elif status_code in expected_status_codes:
            if status_code == HTTPCodes.UNAUTHORIZED:
                logger.info('Unauthorized. If this triggers keyword '
                            'timeout, verify username and password.')
            raise HoneycombError('ODL client on node {0} running but '
                                 'not yet ready.'.format(node['host']),
                                 enable_logging=False)
        else:
            raise HoneycombError('Unexpected return code: {0}.'.
                                 format(status_code))
        return True

    @staticmethod
    def check_odl_shutdown_state(node):
        """Check the status of ODL client shutdown.

        :param node: Honeycomb node.
        :type node: dict
        :returns: True when ODL is stopped.
        :rtype: bool
        :raises HoneycombError: When the ODL client is still running.
        """

        cmd = "pgrep -f karaf"
        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")

        try:
            HTTPRequest.get(node, path, timeout=10, enable_logging=False)
            raise HoneycombError("ODL client is still running.")
        except HTTPRequestError:
            # Connection refused means the REST endpoint is down; confirm
            # the process is gone as well.
            logger.debug("Connection refused, checking process state....")
            ssh = SSH()
            ssh.connect(node)
            ret_code, _, _ = ssh.exec_command(cmd)
            if ret_code == 0:
                raise HoneycombError("ODL client is still running.")

        return True

    @staticmethod
    def mount_honeycomb_on_odl(node):
        """Tell ODL client to mount Honeycomb instance over netconf.

        :param node: Honeycomb node.
        :type node: dict
        :raises HoneycombError: When the response is not code 200: OK.
        """

        path = HcUtil.read_path_from_url_file(
            "odl_client/odl_netconf_connector")

        url_file = "{0}/{1}".format(Const.RESOURCES_TPL_HC,
                                    "odl_client/mount_honeycomb.json")

        with open(url_file) as template:
            data = template.read()

        data = loads(data)

        status_code, _ = HTTPRequest.post(
            node,
            path,
            headers={"Content-Type": "application/json",
                     "Accept": "text/plain"},
            json=data,
            timeout=10,
            enable_logging=False)

        if status_code == HTTPCodes.OK:
            logger.info("ODL mount point configured successfully.")
        elif status_code == HTTPCodes.CONFLICT:
            # CONFLICT means the mount point already exists; not an error.
            logger.info("ODL mount point was already configured.")
        else:
            raise HoneycombError('Mount point configuration not successful')

    @staticmethod
    def stop_odl_client(node, path):
        """Stop ODL client service on the specified node.

        :param node: Node to start ODL client on.
        :param path: Path to ODL client.
        :type node: dict
        :type path: str
        :raises HoneycombError: If ODL client fails to stop.
        """

        # Fixed: original created and connected a second, redundant SSH
        # instance.
        ssh = SSH()
        ssh.connect(node)

        cmd = "{0}/*karaf*/bin/stop".format(path)

        ret_code, _, _ = ssh.exec_command_sudo(cmd)
        if int(ret_code) != 0:
            # Graceful stop failed; fall back to killing the karaf process.
            logger.debug("ODL Client refused to shut down.")
            cmd = "pkill -f 'karaf'"
            (ret_code, _, _) = ssh.exec_command_sudo(cmd)
            if int(ret_code) != 0:
                raise HoneycombError('Node {0} failed to stop ODL.'.
                                     format(node['host']))

        logger.info("ODL client service stopped.")
645
646
647
class HoneycombStartupConfig(object):
    """Generator for Honeycomb startup configuration.

    Builds a shell wrapper script that launches the Honeycomb jar with the
    selected scheduler, CPU affinity, JIT mode and JVM parameters, and
    installs it as /opt/honeycomb/honeycomb on the target node.
    """
    def __init__(self):
        """Initializer."""

        # Wrapper script: Honeycomb exits with status 100 to request a
        # restart, so loop while that status is returned.
        self.template = """#!/bin/sh -
        STATUS=100

        while [ $STATUS -eq 100 ]
        do
          {java_call} -jar $(dirname $0)/{jar_filename}
          STATUS=$?
          echo "Honeycomb exited with status: $STATUS"
          if [ $STATUS -eq 100 ]
          then
            echo "Restarting..."
          fi
        done
        """

        # Assembled from the set_* methods below; empty parts collapse away.
        self.java_call = "{scheduler} {affinity} java{jit_mode}{params}"

        self.scheduler = ""
        self.core_affinity = ""
        self.jit_mode = ""
        self.params = ""
        self.numa = ""

        self.config = ""
        self.ssh = SSH()

    def apply_config(self, node):
        """Generate configuration file /opt/honeycomb/honeycomb on the
        specified node.

        :param node: Honeycomb node.
        :type node: dict
        """

        self.ssh.connect(node)
        _, filename, _ = self.ssh.exec_command("ls /opt/honeycomb | grep .jar")
        # Fixed: stdout ends with a newline (and may list several matches);
        # embedding it raw would corrupt the generated shell script. Use the
        # first matching jar name only.
        filename = filename.strip().splitlines()[0] if filename.strip() \
            else filename.strip()

        java_call = self.java_call.format(scheduler=self.scheduler,
                                          affinity=self.core_affinity,
                                          jit_mode=self.jit_mode,
                                          params=self.params)
        self.config = self.template.format(java_call=java_call,
                                           jar_filename=filename)

        self.ssh.connect(node)
        cmd = "echo '{config}' > /tmp/honeycomb " \
              "&& chmod +x /tmp/honeycomb " \
              "&& sudo mv -f /tmp/honeycomb /opt/honeycomb".\
            format(config=self.config)
        self.ssh.exec_command(cmd)

    def set_cpu_scheduler(self, scheduler="FIFO"):
        """Use alternate CPU scheduler.

        Note: OTHER scheduler doesn't load-balance over isolcpus.

        :param scheduler: CPU scheduler to use (FIFO, RR or OTHER).
        :type scheduler: str
        :raises KeyError: If the scheduler name is not recognized.
        """

        schedulers = {"FIFO": "-f 99",  # First In, First Out
                      "RR": "-r 99",  # Round Robin
                      "OTHER": "-o",  # Ubuntu default
                     }
        self.scheduler = "chrt {0}".format(schedulers[scheduler])

    def set_cpu_core_affinity(self, low, high=None):
        """Set core affinity for the honeycomb process and subprocesses.

        :param low: Lowest core ID number.
        :param high: Highest core ID number. Leave empty to use a single core.
        :type low: int
        :type high: int
        """

        self.core_affinity = "taskset -c {low}-{high}".format(
            low=low, high=high if high else low)

    def set_jit_compiler_mode(self, jit_mode):
        """Set running mode for Java's JIT compiler.

        :param jit_mode: Desired JIT mode (client, server or classic).
        :type jit_mode: str
        :raises KeyError: If the mode name is not recognized.
        """

        modes = {"client": " -client",  # Default
                 "server": " -server",  # Higher performance but longer warmup
                 "classic": " -classic"  # Disables JIT compiler
                }

        self.jit_mode = modes[jit_mode]

    def set_memory_size(self, mem_min, mem_max=None):
        """Set minimum and maximum memory use for the JVM.

        :param mem_min: Minimum amount of memory (MB).
        :param mem_max: Maximum amount of memory (MB). Default is 4 times
        minimum value.
        :type mem_min: int
        :type mem_max: int
        """

        self.params += " -Xms{min}m -Xmx{max}m".format(
            min=mem_min, max=mem_max if mem_max else mem_min*4)

    def set_metaspace_size(self, mem_min, mem_max=None):
        """Set minimum and maximum memory used for class metadata in the JVM.

        :param mem_min: Minimum metaspace size (MB).
        :param mem_max: Maximum metaspace size (MB). Default is 4 times
        minimum value.
        :type mem_min: int
        :type mem_max: int
        """

        self.params += " -XX:MetaspaceSize={min}m " \
                       "-XX:MaxMetaspaceSize={max}m".format(
                           min=mem_min, max=mem_max if mem_max else mem_min*4)

    def set_numa_optimization(self):
        """Use optimization of memory use and garbage collection for NUMA
        architectures."""

        self.params += " -XX:+UseNUMA -XX:+UseParallelGC"

    def set_ssh_security_provider(self):
        """Disables BouncyCastle for SSHD."""
        # Workaround for issue described in:
        # https://wiki.fd.io/view/Honeycomb/Releases/1609/Honeycomb_and_ODL

        self.params += " -Dorg.apache.sshd.registerBouncyCastle=false"

©2016 FD.io a Linux Foundation Collaborative Project. All Rights Reserved.
Linux Foundation is a registered trademark of The Linux Foundation. Linux is a registered trademark of Linus Torvalds.
Please see our privacy policy and terms of use.