d10a5ccb4e0510aeb439ac05fbb0e424eb1c3783
[csit.git] / resources / libraries / python / honeycomb / HoneycombSetup.py
1 # Copyright (c) 2017 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
5 #
6 #     http://www.apache.org/licenses/LICENSE-2.0
7 #
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
13
14 """Implementation of keywords for Honeycomb setup."""
15
16 from json import loads
17 from time import time, sleep
18
19 from ipaddress import IPv6Address, AddressValueError
20
21 from robot.api import logger
22
23 from resources.libraries.python.HTTPRequest import HTTPRequest, HTTPCodes, \
24     HTTPRequestError
25 from resources.libraries.python.constants import Constants as Const
26 from resources.libraries.python.honeycomb.HoneycombUtil import HoneycombError
27 from resources.libraries.python.honeycomb.HoneycombUtil \
28     import HoneycombUtil as HcUtil
29 from resources.libraries.python.ssh import SSH
30 from resources.libraries.python.topology import NodeType
31
32
class HoneycombSetup(object):
    """Keywords for managing the Honeycomb agent on DUT nodes.

    The keywords implemented in this class make possible to:
    - start Honeycomb,
    - stop Honeycomb,
    - check the Honeycomb start-up state,
    - check the Honeycomb shutdown state,
    - add VPP to the topology.
    """

    def __init__(self):
        """No instance state is kept; all keywords are static methods."""
        pass
47     @staticmethod
48     def start_honeycomb_on_duts(*nodes):
49         """Start Honeycomb on specified DUT nodes.
50
51         This keyword starts the Honeycomb service on specified DUTs.
52         The keyword just starts the Honeycomb and does not check its startup
53         state. Use the keyword "Check Honeycomb Startup State" to check if the
54         Honeycomb is up and running.
55         Honeycomb must be installed in "/opt" directory, otherwise the start
56         will fail.
57         :param nodes: List of nodes to start Honeycomb on.
58         :type nodes: list
59         :raises HoneycombError: If Honeycomb fails to start.
60         """
61
62         HoneycombSetup.print_environment(nodes)
63
64         logger.console("\n(re)Starting Honeycomb service ...")
65
66         cmd = "sudo service honeycomb start"
67
68         for node in nodes:
69             if node['type'] == NodeType.DUT:
70                 ssh = SSH()
71                 ssh.connect(node)
72                 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
73                 if int(ret_code) != 0:
74                     raise HoneycombError('Node {0} failed to start Honeycomb.'.
75                                          format(node['host']))
76                 else:
77                     logger.info("Starting the Honeycomb service on node {0} is "
78                                 "in progress ...".format(node['host']))
79
80     @staticmethod
81     def stop_honeycomb_on_duts(*nodes):
82         """Stop the Honeycomb service on specified DUT nodes.
83
84         This keyword stops the Honeycomb service on specified nodes. It just
85         stops the Honeycomb and does not check its shutdown state. Use the
86         keyword "Check Honeycomb Shutdown State" to check if Honeycomb has
87         stopped.
88         :param nodes: List of nodes to stop Honeycomb on.
89         :type nodes: list
90         :raises HoneycombError: If Honeycomb failed to stop.
91         """
92         logger.console("\nShutting down Honeycomb service ...")
93
94         cmd = "sudo service honeycomb stop"
95         errors = []
96
97         for node in nodes:
98             if node['type'] == NodeType.DUT:
99                 ssh = SSH()
100                 ssh.connect(node)
101                 (ret_code, _, _) = ssh.exec_command_sudo(cmd)
102                 if int(ret_code) != 0:
103                     errors.append(node['host'])
104                 else:
105                     logger.info("Stopping the Honeycomb service on node {0} is "
106                                 "in progress ...".format(node['host']))
107         if errors:
108             raise HoneycombError('Node(s) {0} failed to stop Honeycomb.'.
109                                  format(errors))
110
111     @staticmethod
112     def restart_honeycomb_on_dut(node):
113         """Restart Honeycomb on specified DUT nodes.
114
115         This keyword restarts the Honeycomb service on specified DUTs. Use the
116         keyword "Check Honeycomb Startup State" to check if the Honeycomb is up
117         and running.
118
119         :param node: Node to restart Honeycomb on.
120         :type node: dict
121         :raises HoneycombError: If Honeycomb fails to start.
122         """
123
124         logger.console("\n(re)Starting Honeycomb service ...")
125
126         cmd = "sudo service honeycomb restart"
127
128         ssh = SSH()
129         ssh.connect(node)
130         (ret_code, _, _) = ssh.exec_command_sudo(cmd)
131         if int(ret_code) != 0:
132             raise HoneycombError('Node {0} failed to restart Honeycomb.'.
133                                  format(node['host']))
134         else:
135             logger.info(
136                 "Honeycomb service restart is in progress on node {0}".format(
137                     node['host']))
138
139     @staticmethod
140     def check_honeycomb_startup_state(node, timeout=360, retries=20,
141                                       interval=15):
142         """Repeatedly check the status of Honeycomb startup until it is fully
143         started or until timeout or max retries is reached.
144
145         :param node: Honeycomb node.
146         :param timeout: Timeout value in seconds.
147         :param retries: Max number of retries.
148         :param interval: Interval between checks, in seconds.
149         :type node: dict
150         :type timeout: int
151         :type retries: int
152         :type interval: int
153         :raises HoneycombError: If the Honeycomb process IP cannot be found,
154         or if timeout or number of retries is exceeded."""
155
156         ssh = SSH()
157         ssh.connect(node)
158
159         count = 0
160         start = time()
161         while time() - start < timeout and count < retries:
162             count += 1
163
164             try:
165                 status_code_version, _ = HcUtil.get_honeycomb_data(
166                     node, "oper_vpp_version")
167                 status_code_if_cfg, _ = HcUtil.get_honeycomb_data(
168                     node, "config_vpp_interfaces")
169                 status_code_if_oper, _ = HcUtil.get_honeycomb_data(
170                     node, "oper_vpp_interfaces")
171             except HTTPRequestError:
172                 sleep(interval)
173                 continue
174             if status_code_if_cfg == HTTPCodes.OK\
175                     and status_code_if_cfg == HTTPCodes.OK\
176                     and status_code_if_oper == HTTPCodes.OK:
177                 logger.info("Check successful, Honeycomb is up and running.")
178                 break
179             else:
180                 logger.debug(
181                     "Attempt ${count} failed on Restconf check. Status codes:\n"
182                     "Version: {version}\n"
183                     "Interface config: {if_cfg}\n"
184                     "Interface operational: {if_oper}".format(
185                         count=count,
186                         version=status_code_version,
187                         if_cfg=status_code_if_cfg,
188                         if_oper=status_code_if_oper))
189                 sleep(interval)
190                 continue
191         else:
192             _, vpp_status, _ = ssh.exec_command("sudo service vpp status")
193             raise HoneycombError(
194                 "Timeout or max retries exceeded. Status of VPP:\n"
195                 "{vpp_status}".format(vpp_status=vpp_status))
196
197     @staticmethod
198     def check_honeycomb_shutdown_state(node):
199         """Check state of Honeycomb service during shutdown on specified nodes.
200
201         Honeycomb nodes reply with connection refused or the following status
202         codes depending on shutdown progress: codes 200, 404.
203
204         :param node: List of DUT nodes stopping Honeycomb.
205         :type node: dict
206         :return: True if all GETs fail to connect.
207         :rtype bool
208         """
209         cmd = "pgrep honeycomb"
210
211         ssh = SSH()
212         ssh.connect(node)
213         (ret_code, _, _) = ssh.exec_command_sudo(cmd)
214         if ret_code == 0:
215             raise HoneycombError('Honeycomb on node {0} is still '
216                                  'running.'.format(node['host']),
217                                  enable_logging=False)
218         else:
219             logger.info("Honeycomb on node {0} has stopped".
220                         format(node['host']))
221         return True
222
223     @staticmethod
224     def configure_restconf_binding_address(node):
225         """Configure Honeycomb to accept restconf requests from all IP
226         addresses. IP version is determined by node data.
227
228          :param node: Information about a DUT node.
229          :type node: dict
230          :raises HoneycombError: If the configuration could not be changed.
231          """
232
233         find = "restconf-binding-address"
234         try:
235             IPv6Address(unicode(node["host"]))
236             # if management IP of the node is in IPv6 format
237             replace = '\\"restconf-binding-address\\": \\"0::0\\",'
238         except (AttributeError, AddressValueError):
239             replace = '\\"restconf-binding-address\\": \\"0.0.0.0\\",'
240
241         argument = '"/{0}/c\\ {1}"'.format(find, replace)
242         path = "{0}/config/restconf.json".format(Const.REMOTE_HC_DIR)
243         command = "sed -i {0} {1}".format(argument, path)
244
245         ssh = SSH()
246         ssh.connect(node)
247         (ret_code, _, stderr) = ssh.exec_command_sudo(command)
248         if ret_code != 0:
249             raise HoneycombError("Failed to modify configuration on "
250                                  "node {0}, {1}".format(node, stderr))
251
252     @staticmethod
253     def configure_jvpp_timeout(node, timeout=10):
254         """Configure timeout value for Java API commands Honeycomb sends to VPP.
255
256          :param node: Information about a DUT node.
257          :param timeout: Timeout value in seconds.
258          :type node: dict
259          :type timeout: int
260          :raises HoneycombError: If the configuration could not be changed.
261          """
262
263         find = "jvpp-request-timeout"
264         replace = '\\"jvpp-request-timeout\\": {0}'.format(timeout)
265
266         argument = '"/{0}/c\\ {1}"'.format(find, replace)
267         path = "{0}/config/jvpp.json".format(Const.REMOTE_HC_DIR)
268         command = "sed -i {0} {1}".format(argument, path)
269
270         ssh = SSH()
271         ssh.connect(node)
272         (ret_code, _, stderr) = ssh.exec_command_sudo(command)
273         if ret_code != 0:
274             raise HoneycombError("Failed to modify configuration on "
275                                  "node {0}, {1}".format(node, stderr))
276
277     @staticmethod
278     def print_environment(nodes):
279         """Print information about the nodes to log. The information is defined
280         by commands in cmds tuple at the beginning of this method.
281
282         :param nodes: List of DUT nodes to get information about.
283         :type nodes: list
284         """
285
286         # TODO: When everything is set and running in VIRL env, transform this
287         # method to a keyword checking the environment.
288
289         cmds = ("uname -a",
290                 "df -lh",
291                 "echo $JAVA_HOME",
292                 "echo $PATH",
293                 "which java",
294                 "java -version",
295                 "dpkg --list | grep openjdk",
296                 "ls -la /opt/honeycomb")
297
298         for node in nodes:
299             if node['type'] == NodeType.DUT:
300                 logger.info("Checking node {} ...".format(node['host']))
301                 for cmd in cmds:
302                     logger.info("Command: {}".format(cmd))
303                     ssh = SSH()
304                     ssh.connect(node)
305                     ssh.exec_command_sudo(cmd)
306
307     @staticmethod
308     def print_ports(node):
309         """Uses "sudo netstat -anp | grep java" to print port where a java
310         application listens.
311
312         :param node: Honeycomb node where we want to print the ports.
313         :type node: dict
314         """
315
316         cmds = ("netstat -anp | grep java",
317                 "ps -ef | grep [h]oneycomb")
318
319         logger.info("Checking node {} ...".format(node['host']))
320         for cmd in cmds:
321             logger.info("Command: {}".format(cmd))
322             ssh = SSH()
323             ssh.connect(node)
324             ssh.exec_command_sudo(cmd)
325
326     @staticmethod
327     def configure_log_level(node, level):
328         """Set Honeycomb logging to the specified level.
329
330         :param node: Honeycomb node.
331         :param level: Log level (INFO, DEBUG, TRACE).
332         :type node: dict
333         :type level: str
334         """
335
336         find = 'logger name=\\"io.fd\\"'
337         replace = '<logger name=\\"io.fd\\" level=\\"{0}\\"/>'.format(level)
338
339         argument = '"/{0}/c\\ {1}"'.format(find, replace)
340         path = "{0}/config/logback.xml".format(Const.REMOTE_HC_DIR)
341         command = "sed -i {0} {1}".format(argument, path)
342
343         ssh = SSH()
344         ssh.connect(node)
345         (ret_code, _, stderr) = ssh.exec_command_sudo(command)
346         if ret_code != 0:
347             raise HoneycombError("Failed to modify configuration on "
348                                  "node {0}, {1}".format(node, stderr))
349
350     @staticmethod
351     def manage_honeycomb_features(node, feature, disable=False):
352         """Configure Honeycomb to use features that are disabled by default, or
353         disable previously enabled features.
354
355         ..Note:: If the module is not enabled in VPP, Honeycomb will
356         be unable to establish VPP connection.
357
358         :param node: Honeycomb node.
359         :param feature: Feature to enable.
360         :param disable: Disable the specified feature instead of enabling it.
361         :type node: dict
362         :type feature: string
363         :type disable: bool
364         :raises HoneycombError: If the configuration could not be changed.
365          """
366
367         disabled_features = {
368             "NSH": "io.fd.hc2vpp.vppnsh.impl.VppNshModule"
369         }
370
371         ssh = SSH()
372         ssh.connect(node)
373
374         if feature in disabled_features.keys():
375             # uncomment by replacing the entire line
376             find = replace = "{0}".format(disabled_features[feature])
377             if disable:
378                 replace = "// {0}".format(find)
379
380             argument = '"/{0}/c\\ {1}"'.format(find, replace)
381             path = "{0}/modules/*module-config"\
382                 .format(Const.REMOTE_HC_DIR)
383             command = "sed -i {0} {1}".format(argument, path)
384
385             (ret_code, _, stderr) = ssh.exec_command_sudo(command)
386             if ret_code != 0:
387                 raise HoneycombError("Failed to modify configuration on "
388                                      "node {0}, {1}".format(node, stderr))
389         else:
390             raise HoneycombError(
391                 "Unrecognized feature {0}.".format(feature))
392
393     @staticmethod
394     def copy_java_libraries(node):
395         """Copy Java libraries installed by vpp-api-java package to honeycomb
396         lib folder.
397
398         This is a (temporary?) workaround for jvpp version mismatches.
399
400         :param node: Honeycomb node
401         :type node: dict
402         """
403
404         ssh = SSH()
405         ssh.connect(node)
406         (_, stdout, _) = ssh.exec_command_sudo(
407             "ls /usr/share/java | grep ^jvpp-*")
408
409         files = stdout.split("\n")[:-1]
410         for item in files:
411             # example filenames:
412             # jvpp-registry-17.04.jar
413             # jvpp-core-17.04.jar
414
415             parts = item.split("-")
416             version = "{0}-SNAPSHOT".format(parts[2][:5])
417             artifact_id = "{0}-{1}".format(parts[0], parts[1])
418
419             directory = "{0}/lib/io/fd/vpp/{1}/{2}".format(
420                 Const.REMOTE_HC_DIR, artifact_id, version)
421             cmd = "sudo mkdir -p {0}; " \
422                   "sudo cp /usr/share/java/{1} {0}/{2}-{3}.jar".format(
423                       directory, item, artifact_id, version)
424
425             (ret_code, _, stderr) = ssh.exec_command(cmd)
426             if ret_code != 0:
427                 raise HoneycombError("Failed to copy JVPP libraries on "
428                                      "node {0}, {1}".format(node, stderr))
429
430     @staticmethod
431     def copy_odl_client(node, odl_name, src_path, dst_path):
432         """Copy ODL Client from source path to destination path.
433
434         :param node: Honeycomb node.
435         :param odl_name: Name of ODL client version to use.
436         :param src_path: Source Path where to find ODl client.
437         :param dst_path: Destination path.
438         :type node: dict
439         :type odl_name: str
440         :type src_path: str
441         :type dst_path: str
442         :raises HoneycombError: If the operation fails.
443         """
444
445         ssh = SSH()
446         ssh.connect(node)
447
448         cmd = "sudo rm -rf {dst}/*karaf_{odl_name} && " \
449               "cp -r {src}/*karaf_{odl_name}* {dst}".format(
450                   src=src_path, odl_name=odl_name, dst=dst_path)
451
452         ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=180)
453         if int(ret_code) != 0:
454             raise HoneycombError(
455                 "Failed to copy ODL client on node {0}".format(node["host"]))
456
457     @staticmethod
458     def setup_odl_client(node, path):
459         """Start ODL client on the specified node.
460
461         Karaf should be located in the provided path, and VPP and Honeycomb
462         should already be running, otherwise the start will fail.
463         :param node: Node to start ODL client on.
464         :param path: Path to ODL client on node.
465         :type node: dict
466         :type path: str
467         :raises HoneycombError: If Honeycomb fails to start.
468         """
469
470         logger.console("\nStarting ODL client ...")
471         ssh = SSH()
472         ssh.connect(node)
473
474         cmd = "{path}/*karaf*/bin/start clean".format(path=path)
475         ret_code, _, _ = ssh.exec_command_sudo(cmd)
476
477         if int(ret_code) != 0:
478             raise HoneycombError('Node {0} failed to start ODL.'.
479                                  format(node['host']))
480         else:
481             logger.info("Starting the ODL client on node {0} is "
482                         "in progress ...".format(node['host']))
483
484     @staticmethod
485     def install_odl_features(node, path, *features):
486         """Install required features on a running ODL client.
487
488         :param node: Honeycomb node.
489         :param path: Path to ODL client on node.
490         :param features: Optional, list of additional features to install.
491         :type node: dict
492         :type path: str
493         :type features: list
494         """
495
496         ssh = SSH()
497         ssh.connect(node)
498
499         cmd = "{path}/*karaf*/bin/client -u karaf feature:install " \
500               "odl-restconf-all " \
501               "odl-netconf-connector-all " \
502               "odl-netconf-topology".format(path=path)
503         for feature in features:
504             cmd += " {0}".format(feature)
505
506         ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=250)
507
508         if int(ret_code) != 0:
509             raise HoneycombError("Feature install did not succeed.")
510
511     @staticmethod
512     def check_odl_startup_state(node):
513         """Check the status of ODL client startup.
514
515         :param node: Honeycomb node.
516         :param node: dict
517         :returns: True when ODL is started.
518         :rtype: bool
519         :raises HoneycombError: When the response is not code 200: OK.
520         """
521
522         path = HcUtil.read_path_from_url_file(
523             "odl_client/odl_netconf_connector")
524         expected_status_codes = (HTTPCodes.UNAUTHORIZED,
525                                  HTTPCodes.FORBIDDEN,
526                                  HTTPCodes.NOT_FOUND,
527                                  HTTPCodes.SERVICE_UNAVAILABLE,
528                                  HTTPCodes.INTERNAL_SERVER_ERROR)
529
530         status_code, _ = HTTPRequest.get(node, path, timeout=10,
531                                          enable_logging=False)
532         if status_code == HTTPCodes.OK:
533             logger.info("ODL client on node {0} is up and running".
534                         format(node['host']))
535         elif status_code in expected_status_codes:
536             if status_code == HTTPCodes.UNAUTHORIZED:
537                 logger.info('Unauthorized. If this triggers keyword '
538                             'timeout, verify username and password.')
539             raise HoneycombError('ODL client on node {0} running but '
540                                  'not yet ready.'.format(node['host']),
541                                  enable_logging=False)
542         else:
543             raise HoneycombError('Unexpected return code: {0}.'.
544                                  format(status_code))
545         return True
546
547     @staticmethod
548     def check_odl_shutdown_state(node):
549         """Check the status of ODL client shutdown.
550
551         :param node: Honeycomb node.
552         :type node: dict
553         :returns: True when ODL is stopped.
554         :rtype: bool
555         :raises HoneycombError: When the response is not code 200: OK.
556         """
557
558         cmd = "pgrep -f karaf"
559         path = HcUtil.read_path_from_url_file(
560             "odl_client/odl_netconf_connector")
561
562         try:
563             HTTPRequest.get(node, path, timeout=10, enable_logging=False)
564             raise HoneycombError("ODL client is still running.")
565         except HTTPRequestError:
566             logger.debug("Connection refused, checking process state....")
567             ssh = SSH()
568             ssh.connect(node)
569             ret_code, _, _ = ssh.exec_command(cmd)
570             if ret_code == 0:
571                 raise HoneycombError("ODL client is still running.")
572
573         return True
574
575     @staticmethod
576     def mount_honeycomb_on_odl(node):
577         """Tell ODL client to mount Honeycomb instance over netconf.
578
579         :param node: Honeycomb node.
580         :type node: dict
581         :raises HoneycombError: When the response is not code 200: OK.
582         """
583
584         path = HcUtil.read_path_from_url_file(
585             "odl_client/odl_netconf_connector")
586
587         url_file = "{0}/{1}".format(Const.RESOURCES_TPL_HC,
588                                     "odl_client/mount_honeycomb.json")
589
590         with open(url_file) as template:
591             data = template.read()
592
593         data = loads(data)
594
595         status_code, _ = HTTPRequest.post(
596             node,
597             path,
598             headers={"Content-Type": "application/json",
599                      "Accept": "text/plain"},
600             json=data,
601             timeout=10,
602             enable_logging=False)
603
604         if status_code == HTTPCodes.OK:
605             logger.info("ODL mount point configured successfully.")
606         elif status_code == HTTPCodes.CONFLICT:
607             logger.info("ODL mount point was already configured.")
608         else:
609             raise HoneycombError('Mount point configuration not successful')
610
611     @staticmethod
612     def stop_odl_client(node, path):
613         """Stop ODL client service on the specified node.
614
615         :param node: Node to start ODL client on.
616         :param path: Path to ODL client.
617         :type node: dict
618         :type path: str
619         :raises HoneycombError: If ODL client fails to stop.
620         """
621
622         ssh = SSH()
623         ssh.connect(node)
624
625         cmd = "{0}/*karaf*/bin/stop".format(path)
626
627         ssh = SSH()
628         ssh.connect(node)
629         ret_code, _, _ = ssh.exec_command_sudo(cmd)
630         if int(ret_code) != 0:
631             logger.debug("ODL Client refused to shut down.")
632             cmd = "pkill -f 'karaf'"
633             (ret_code, _, _) = ssh.exec_command_sudo(cmd)
634             if int(ret_code) != 0:
635                 raise HoneycombError('Node {0} failed to stop ODL.'.
636                                      format(node['host']))
637
638         logger.info("ODL client service stopped.")
639
640
641
class HoneycombStartupConfig(object):
    """Generator for Honeycomb startup configuration."""

    def __init__(self):
        """Initialize the configuration builder with empty options."""

        # Shell wrapper that keeps restarting Honeycomb while the java
        # process exits with status 100.
        self.template = """#!/bin/sh -
        STATUS=100

        while [ $STATUS -eq 100 ]
        do
          {java_call} -jar $(dirname $0)/{jar_filename}
          STATUS=$?
          echo "Honeycomb exited with status: $STATUS"
          if [ $STATUS -eq 100 ]
          then
            echo "Restarting..."
          fi
        done
        """

        # Template for the java invocation line inside the wrapper.
        self.java_call = "{scheduler} {affinity} java{jit_mode}{params}"

        # Individual tunables filled in by the set_* methods.
        self.scheduler = ""
        self.core_affinity = ""
        self.jit_mode = ""
        self.params = ""
        self.numa = ""

        # Rendered script text and SSH session used by apply_config.
        self.config = ""
        self.ssh = SSH()
673
674     def apply_config(self, node):
675         """Generate configuration file /opt/honeycomb/honeycomb on the specified
676          node.
677
678          :param node: Honeycomb node.
679          :type node: dict
680          """
681
682         self.ssh.connect(node)
683         _, filename, _ = self.ssh.exec_command("ls /opt/honeycomb | grep .jar")
684
685         java_call = self.java_call.format(scheduler=self.scheduler,
686                                           affinity=self.core_affinity,
687                                           jit_mode=self.jit_mode,
688                                           params=self.params)
689         self.config = self.template.format(java_call=java_call,
690                                            jar_filename=filename)
691
692         self.ssh.connect(node)
693         cmd = "echo '{config}' > /tmp/honeycomb " \
694               "&& chmod +x /tmp/honeycomb " \
695               "&& sudo mv -f /tmp/honeycomb /opt/honeycomb".\
696             format(config=self.config)
697         self.ssh.exec_command(cmd)
698
699     def set_cpu_scheduler(self, scheduler="FIFO"):
700         """Use alternate CPU scheduler.
701
702         Note: OTHER scheduler doesn't load-balance over isolcpus.
703
704         :param scheduler: CPU scheduler to use.
705         :type scheduler: str
706         """
707
708         schedulers = {"FIFO": "-f 99",  # First In, First Out
709                       "RR": "-r 99",  # Round Robin
710                       "OTHER": "-o",  # Ubuntu default
711                      }
712         self.scheduler = "chrt {0}".format(schedulers[scheduler])
713
714     def set_cpu_core_affinity(self, low, high=None):
715         """Set core affinity for the honeycomb process and subprocesses.
716
717         :param low: Lowest core ID number.
718         :param high: Highest core ID number. Leave empty to use a single core.
719         :type low: int
720         :type high: int
721         """
722
723         self.core_affinity = "taskset -c {low}-{high}".format(
724             low=low, high=high if high else low)
725
726     def set_jit_compiler_mode(self, jit_mode):
727         """Set running mode for Java's JIT compiler.
728
729         :param jit_mode: Desiret JIT mode.
730         :type jit_mode: str
731         """
732
733         modes = {"client": " -client",  # Default
734                  "server": " -server",  # Higher performance but longer warmup
735                  "classic": " -classic"  # Disables JIT compiler
736                 }
737
738         self.jit_mode = modes[jit_mode]
739
740     def set_memory_size(self, mem_min, mem_max=None):
741         """Set minimum and maximum memory use for the JVM.
742
743         :param mem_min: Minimum amount of memory (MB).
744         :param mem_max: Maximum amount of memory (MB). Default is 4 times
745         minimum value.
746         :type mem_min: int
747         :type mem_max: int
748         """
749
750         self.params += " -Xms{min}m -Xmx{max}m".format(
751             min=mem_min, max=mem_max if mem_max else mem_min*4)
752
753     def set_metaspace_size(self, mem_min, mem_max=None):
754         """Set minimum and maximum memory used for class metadata in the JVM.
755
756         :param mem_min: Minimum metaspace size (MB).
757         :param mem_max: Maximum metaspace size (MB). Defailt is 4 times
758         minimum value.
759         :type mem_min: int
760         :type mem_max: int
761         """
762
763         self.params += " -XX:MetaspaceSize={min}m " \
764                        "-XX:MaxMetaspaceSize={max}m".format(
765                            min=mem_min, max=mem_max if mem_max else mem_min*4)
766
767     def set_numa_optimization(self):
768         """Use optimization of memory use and garbage collection for NUMA
769         architectures."""
770
771         self.params += " -XX:+UseNUMA -XX:+UseParallelGC"
772
773     def set_ssh_security_provider(self):
774         """Disables BouncyCastle for SSHD."""
775         # Workaround for issue described in:
776         # https://wiki.fd.io/view/Honeycomb/Releases/1609/Honeycomb_and_ODL
777
778         self.params += " -Dorg.apache.sshd.registerBouncyCastle=false"

©2016 FD.io a Linux Foundation Collaborative Project. All Rights Reserved.
Linux Foundation is a registered trademark of The Linux Foundation. Linux is a registered trademark of Linus Torvalds.
Please see our privacy policy and terms of use.