CSIT-755: Presentation and analytics layer 93/7993/19
author Tibor Frank <tifrank@cisco.com>
Fri, 11 Aug 2017 08:44:36 +0000 (10:44 +0200)
committer Tibor Frank <tifrank@cisco.com>
Tue, 5 Sep 2017 13:36:20 +0000 (15:36 +0200)
- CSIT-760: Configuration - real example
- CSIT-774: Implementation - parse configuration
- CSIT-779: Implementation - set environment
- CSIT-780: Implementation - download data
- CSIT-783: Implementation - debug mode
- CSIT-761: Implementation - Data pre-processing - parse input files
- CSIT-784: Implementation - Data pre-processing - store the data, access to data
- CSIT-789: Implementation - Data pre-processing - extract Documentation of the suite
- CSIT-757: Low Level Design
- CSIT-788: Implementation - Data pre-processing - extract VAT history and show runtime
- CSIT-785: Implementation - Data filtering

Change-Id: I6fd1eb1df4af99eaf91925282cdee1c892698c59
Signed-off-by: Tibor Frank <tifrank@cisco.com>
resources/tools/presentation/configuration.py [new file with mode: 0644]
resources/tools/presentation/configuration.yaml [new file with mode: 0644]
resources/tools/presentation/data.py [new file with mode: 0644]
resources/tools/presentation/environment.py [new file with mode: 0644]
resources/tools/presentation/errors.py [new file with mode: 0644]
resources/tools/presentation/inputs.py [new file with mode: 0644]
resources/tools/presentation/lld.rst [new file with mode: 0644]
resources/tools/presentation/presentation.py [new file with mode: 0644]
resources/tools/presentation/templates/tmpl_performance_improvements.csv [new file with mode: 0644]

diff --git a/resources/tools/presentation/configuration.py b/resources/tools/presentation/configuration.py
new file mode 100644 (file)
index 0000000..4307abc
--- /dev/null
@@ -0,0 +1,445 @@
+# Copyright (c) 2017 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Configuration
+
+Parsing of the configuration YAML file.
+"""
+
+
+import logging
+from yaml import safe_load, YAMLError
+from pprint import pformat
+
+from errors import PresentationError
+
+
+class Configuration(object):
+    """Configuration of Presentation and analytics layer.
+
+    - based on configuration specified in the configuration YAML file
+    - presentation and analytics layer is model driven
+    """
+
+    # Tags are used in the configuration YAML file and are replaced by their
+    # values while the file is parsed.
+    TAG_OPENER = "{"
+    TAG_CLOSER = "}"
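+
+    # Illustrative example (source data taken from configuration.yaml):
+    # with {"DIR[WORKING]": "_tmp"}, the string "{DIR[WORKING]}/data" is
+    # resolved by _replace_tags() to "_tmp/data".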
+
+    def __init__(self, cfg_file):
+        """Initialization.
+
+        :param cfg_file: File handle of the configuration YAML file.
+        :type cfg_file: BinaryIO
+        """
+        self._cfg_file = cfg_file
+        self._cfg_yaml = None
+
+        # The parsed configuration is stored in this dictionary.
+        self._configuration = {"environment": dict(),
+                               "debug": dict(),
+                               "input": dict(),
+                               "output": dict(),
+                               "tables": list(),
+                               "plots": list()}
+
+    @property
+    def configuration(self):
+        """Getter - configuration.
+
+        :returns: Configuration.
+        :rtype: dict
+        """
+        return self._configuration
+
+    @property
+    def environment(self):
+        """Getter - environment.
+
+        :returns: Environment configuration.
+        :rtype: dict
+        """
+        return self._configuration["environment"]
+
+    @property
+    def debug(self):
+        """Getter - debug
+
+        :returns: Debug configuration
+        :rtype: dict
+        """
+        return self._configuration["debug"]
+
+    @property
+    def is_debug(self):
+        """Getter - debug mode
+
+        :returns: True if debug mode is on, otherwise False.
+        :rtype: bool
+        """
+
+        try:
+            return self.environment["configuration"]["CFG[DEBUG]"] == 1
+        except KeyError:
+            return False
+
+    @property
+    def input(self):
+        """Getter - configuration - inputs.
+        - jobs and builds.
+
+        :returns: Inputs.
+        :rtype: dict
+        """
+        return self._configuration["input"]
+
+    @property
+    def builds(self):
+        """Getter - builds defined in configuration.
+
+        :returns: Builds defined in the configuration.
+        :rtype: dict
+        """
+        return self.input["builds"]
+
+    @property
+    def output(self):
+        """Getter - configuration - output formats and versions to be generated.
+        - formats: html, pdf
+        - versions: full, ...
+
+        :returns: Outputs to be generated.
+        :rtype: dict
+        """
+        return self._configuration["output"]
+
+    @property
+    def tables(self):
+        """Getter - tables to be generated.
+
+        :returns: List of specifications of tables to be generated.
+        :rtype: list
+        """
+        return self._configuration["tables"]
+
+    @property
+    def plots(self):
+        """Getter - plots to be generated.
+
+        :returns: List of specifications of plots to be generated.
+        :rtype: list
+        """
+        return self._configuration["plots"]
+
+    def set_input_state(self, job, build_nr, state):
+        """Set the state of input
+
+        :param job:
+        :param build_nr:
+        :param state:
+        :return:
+        """
+
+        try:
+            for build in self._configuration["input"]["builds"][job]:
+                if build["build"] == build_nr:
+                    build["status"] = state
+                    break
+            else:
+                raise PresentationError("Build '{}' is not defined for job '{}'"
+                                        " in configuration file.".
+                                        format(build_nr, job))
+        except KeyError:
+            raise PresentationError("Job '{}' and build '{}' is not defined in "
+                                    "configuration file.".format(job, build_nr))
+
+    def set_input_file_name(self, job, build_nr, file_name):
+        """Set the state of input
+
+        :param job:
+        :param build_nr:
+        :param file_name:
+        :return:
+        """
+
+        try:
+            for build in self._configuration["input"]["builds"][job]:
+                if build["build"] == build_nr:
+                    build["file-name"] = file_name
+                    break
+            else:
+                raise PresentationError("Build '{}' is not defined for job '{}'"
+                                        " in configuration file.".
+                                        format(build_nr, job))
+        except KeyError:
+            raise PresentationError("Job '{}' and build '{}' is not defined in "
+                                    "configuration file.".format(job, build_nr))
+
+    def _get_type_index(self, item_type):
+        """Get index of item type (environment, input, output, ...) in
+        configuration YAML file.
+
+        :param item_type: Item type: Top level items in configuration YAML file,
+        e.g.: environment, input, output.
+        :type item_type: str
+        :returns: Index of the given item type.
+        :rtype: int
+        """
+
+        index = 0
+        for item in self._cfg_yaml:
+            if item["type"] == item_type:
+                return index
+            index += 1
+        return None
+
+    def _find_tag(self, text):
+        """Find the first tag in the given text. The tag is enclosed by the
+        TAG_OPENER and TAG_CLOSER.
+
+        :param text: Text to be searched.
+        :type text: str
+        :returns: The tag, or None if not found.
+        :rtype: str
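+
+        Example (illustrative, tag taken from configuration.yaml):
+            >>> self._find_tag("{DIR[WORKING]}/data")
+            '{DIR[WORKING]}'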
+        """
+        try:
+            start = text.index(self.TAG_OPENER)
+            end = text.index(self.TAG_CLOSER, start + 1) + 1
+            return text[start:end]
+        except ValueError:
+            return None
+
+    def _replace_tags(self, data, src_data=None):
+        """Replace tag(s) in the data by their values.
+
+        :param data: The data where the tags will be replaced by their values.
+        :param src_data: Data where the tags are defined. It is a dictionary
+        where the key is the tag and the value is the tag value. If not given,
+        'data' is used instead.
+        :type data: str or dict
+        :type src_data: dict
+        :returns: Data with the tags replaced.
+        :rtype: str or dict
+        :raises: PresentationError if it is not possible to replace the tag or
+        the data is not the supported data type (str, dict).
+        """
+
+        if src_data is None:
+            src_data = data
+
+        if isinstance(data, str):
+            tag = self._find_tag(data)
+            if tag is not None:
+                try:
+                    data = data.replace(tag, src_data[tag[1:-1]])
+                except KeyError:
+                    raise PresentationError("Not possible to replace the "
+                                            "tag '{}'".format(tag))
+
+        elif isinstance(data, dict):
+            counter = 0
+            for key, value in data.items():
+                tag = self._find_tag(value)
+                if tag is not None:
+                    try:
+                        data[key] = value.replace(tag, src_data[tag[1:-1]])
+                        counter += 1
+                    except KeyError:
+                        raise PresentationError("Not possible to replace the "
+                                                "tag '{}'".format(tag))
+            if counter:
+                self._replace_tags(data, src_data)
+        else:
+            raise PresentationError("Replace tags: Not supported data type.")
+
+        return data
+
+    def _parse_env(self):
+        """Parse environment configuration in the configuration YAML file.
+        """
+
+        logging.info("Parsing configuration file: environment ...")
+
+        idx = self._get_type_index("environment")
+        if idx is None:
+            return None
+
+        try:
+            self._configuration["environment"]["configuration"] = \
+                self._cfg_yaml[idx]["configuration"]
+        except KeyError:
+            self._configuration["environment"]["configuration"] = None
+
+        try:
+            self._configuration["environment"]["paths"] = \
+                self._replace_tags(self._cfg_yaml[idx]["paths"])
+        except KeyError:
+            self._configuration["environment"]["paths"] = None
+
+        try:
+            self._configuration["environment"]["urls"] = \
+                self._replace_tags(self._cfg_yaml[idx]["urls"])
+        except KeyError:
+            self._configuration["environment"]["urls"] = None
+
+        try:
+            self._configuration["environment"]["make-dirs"] = \
+                self._cfg_yaml[idx]["make-dirs"]
+        except KeyError:
+            self._configuration["environment"]["make-dirs"] = None
+
+        try:
+            self._configuration["environment"]["remove-dirs"] = \
+                self._cfg_yaml[idx]["remove-dirs"]
+        except KeyError:
+            self._configuration["environment"]["remove-dirs"] = None
+
+        try:
+            self._configuration["environment"]["build-dirs"] = \
+                self._cfg_yaml[idx]["build-dirs"]
+        except KeyError:
+            self._configuration["environment"]["build-dirs"] = None
+
+        logging.info("Done.")
+
+    def _parse_debug(self):
+        """Parse debug configuration in the configuration YAML file.
+        """
+
+        logging.info("Parsing configuration file: debug ...")
+
+        idx = self._get_type_index("debug")
+        if idx is None:
+            self.environment["configuration"]["CFG[DEBUG]"] = 0
+            return None
+
+        try:
+            for key, value in self._cfg_yaml[idx]["general"].items():
+                self._configuration["debug"][key] = value
+
+            self._configuration["input"]["builds"] = dict()
+            for job, builds in self._cfg_yaml[idx]["builds"].items():
+                if builds:
+                    self._configuration["input"]["builds"][job] = list()
+                    for build in builds:
+                        self._configuration["input"]["builds"][job].\
+                            append({"build": build["build"],
+                                    "status": "downloaded",
+                                    "file-name": self._replace_tags(
+                                        build["file"],
+                                        self.environment["paths"])})
+                else:
+                    logging.warning("No build is defined for the job '{}'. "
+                                    "Trying to continue without it.".
+                                    format(job))
+
+        except KeyError:
+            raise PresentationError("No data to process.")
+
+    def _parse_input(self):
+        """Parse input configuration in the configuration YAML file.
+
+        :raises: PresentationError if there are no data to process.
+        """
+
+        logging.info("Parsing configuration file: input ...")
+
+        idx = self._get_type_index("input")
+        if idx is None:
+            raise PresentationError("No data to process.")
+
+        try:
+            for key, value in self._cfg_yaml[idx]["general"].items():
+                self._configuration["input"][key] = value
+            self._configuration["input"]["builds"] = dict()
+            for job, builds in self._cfg_yaml[idx]["builds"].items():
+                if builds:
+                    self._configuration["input"]["builds"][job] = list()
+                    for build in builds:
+                        self._configuration["input"]["builds"][job].\
+                            append({"build": build, "status": None})
+                else:
+                    logging.warning("No build is defined for the job '{}'. "
+                                    "Trying to continue without it.".
+                                    format(job))
+        except KeyError:
+            raise PresentationError("No data to process.")
+
+        logging.info("Done.")
+
+    def _parse_output(self):
+        """Parse output configuration in the configuration YAML file.
+
+        :raises: PresentationError if there is no output defined.
+        """
+
+        logging.info("Parsing configuration file: output ...")
+
+        idx = self._get_type_index("output")
+        if idx is None:
+            raise PresentationError("No output defined.")
+
+        try:
+            self._configuration["output"] = self._cfg_yaml[idx]["format"]
+        except KeyError:
+            raise PresentationError("No output defined.")
+
+        logging.info("Done.")
+
+    def _parse_elements(self):
+        """Parse elements (tables, plots) configuration in the configuration
+        YAML file.
+        """
+
+        logging.info("Parsing configuration file: elements ...")
+
+        count = 1
+        for element in self._cfg_yaml:
+            try:
+                element["output-file"] = self._replace_tags(
+                    element["output-file"],
+                    self._configuration["environment"]["paths"])
+            except KeyError:
+                pass
+            if element["type"] == "table":
+                logging.info("  {:3d} Processing a table ...".format(count))
+                self._configuration["tables"].append(element)
+                count += 1
+            elif element["type"] == "plot":
+                logging.info("  {:3d} Processing a plot ...".format(count))
+                self._configuration["plots"].append(element)
+                count += 1
+
+        logging.info("Done.")
+
+    def parse_cfg(self):
+        """Parse configuration in the configuration YAML file.
+
+        :raises: PresentationError if an error occurs while parsing the
+        configuration file.
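+
+        Example (illustrative):
+            config = Configuration(open("configuration.yaml"))
+            config.parse_cfg()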
+        """
+        try:
+            self._cfg_yaml = safe_load(self._cfg_file)
+        except YAMLError as err:
+            raise PresentationError(msg="An error occurred while parsing the "
+                                        "configuration file.",
+                                    details=str(err))
+
+        self._parse_env()
+        self._parse_debug()
+        if not self.debug:
+            self._parse_input()
+        self._parse_output()
+        self._parse_elements()
+
+        logging.debug("Configuration: \n{}".
+                      format(pformat(self._configuration)))
diff --git a/resources/tools/presentation/configuration.yaml b/resources/tools/presentation/configuration.yaml
new file mode 100644 (file)
index 0000000..a7e6ea9
--- /dev/null
@@ -0,0 +1,230 @@
+# NOTE: This is only an example. The work is in progress, so the format may
+# change.
+-
+  type: "environment"
+  configuration:
+    # Debug mode:
+    # - Skip:
+    #   - Download of input data files
+    # - Do:
+    #   - Read data from given zip / xml files
+    #   - Set the configuration as it is done in normal mode
+    # If the section "type: debug" is missing, CFG[DEBUG] is set to 0.
+    CFG[DEBUG]: 1
+
+    CFG[BLD_LATEX]: 1  # Remove?
+    CFG[BLD_HTML]: 1  # Remove?
+
+  paths:
+    DIR[WORKING]: "_tmp"
+    DIR[BUILD,HTML]: "_build"
+    DIR[BUILD,LATEX]: "_build_latex"
+    DIR[RST]: "../../../docs/report"
+
+    DIR[WORKING,DATA]: "{DIR[WORKING]}/data"
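+    # Illustrative: after tag replacement, DIR[WORKING,DATA] resolves to
+    # "_tmp/data".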
+    
+    DIR[STATIC]: "{DIR[BUILD,HTML]}/_static"
+    DIR[STATIC,VPP]: "{DIR[STATIC]}/vpp"
+    DIR[STATIC,DPDK]: "{DIR[STATIC]}/dpdk"
+    DIR[STATIC,ARCH]: "{DIR[STATIC]}/archive"
+    DIR[STATIC,TREND]: "{DIR[STATIC]}/trending"
+
+    DIR[PLOT,VPP]: "{DIR[WORKING]}/vpp_plot"
+    DIR[PLOT,DPDK]: "{DIR[WORKING]}/dpdk_plot"
+
+    DIR[DTR]: "{DIR[RST]}/detailed_test_results"
+    DIR[DTR,PERF,DPDK]: "{DIR[DTR]}/dpdk_performance_results"
+    DIR[DTR,PERF,VPP]: "{DIR[DTR]}/vpp_performance_results"
+    DIR[DTR,PERF,HC]: "{DIR[DTR]}/honeycomb_performance_results"
+    DIR[DTR,FUNC,VPP]: "{DIR[DTR]}/vpp_functional_results"
+    DIR[DTR,FUNC,HC]: "{DIR[DTR]}/honeycomb_functional_results"
+    DIR[DTR,FUNC,NSHSFC]: "{DIR[DTR]}/nshsfc_functional_results"
+    DIR[DTR,PERF,VPP,IMPRV]: "{DIR[RST]}/vpp_performance_tests/performance_improvements"
+
+    DIR[DTC]: "{DIR[RST]}/test_configuration"
+    DIR[DTC,PERF,VPP]: "{DIR[DTC]}/vpp_performance_configuration"
+    DIR[DTC,FUNC,VPP]: "{DIR[DTC]}/vpp_functional_configuration"
+    
+    DIR[DTO]: "{DIR[RST]}/test_operational_data"
+    DIR[DTO,PERF,VPP]: "{DIR[DTO]}/vpp_performance_operational_data"
+
+    DIR[CSS_PATCH_FILE]: "{DIR[STATIC]}/theme_overrides.css"
+
+  urls:
+    URL[JENKINS,CSIT]: "https://jenkins.fd.io/view/csit/job"
+    URL[JENKINS,HC]: "https://jenkins.fd.io/view/hc2vpp/job"
+
+  make-dirs:
+  # List the directories which are created while preparing the environment.
+  # All directories MUST be defined in "paths" section.
+  - "DIR[WORKING,DATA]"
+  - "DIR[STATIC,VPP]"
+  - "DIR[STATIC,DPDK]"
+  - "DIR[STATIC,ARCH]"
+  - "DIR[STATIC,TREND]"
+  - "DIR[PLOT,VPP]"
+  - "DIR[PLOT,DPDK]"
+  - "DIR[BUILD,LATEX]"
+
+  remove-dirs:
+  # List the directories which are deleted while cleaning the environment.
+  # All directories MUST be defined in "paths" section.
+  - "DIR[WORKING]"
+
+  build-dirs:
+  # List the directories where the results (build) is stored.
+  # All directories MUST be defined in "paths" section.
+  - "DIR[BUILD,HTML]"
+  - "DIR[BUILD,LATEX]"
+
+-
+  type: "debug"
+  general:
+    input-format: "xml"  # zip or xml
+    extract: "output.xml"  # Only for zip
+  builds:
+    # The files must be in the directory DIR[WORKING,DATA]
+    csit-vpp-perf-1707-all:
+    -
+      build: 13
+      file: "{DIR[WORKING,DATA]}/csit-vpp-perf-1707-all__13__output.xml"
+    -
+      build: 16
+      file: "{DIR[WORKING,DATA]}/csit-vpp-perf-1707-all__16__output.xml"
+    -
+      build: 17
+      file: "{DIR[WORKING,DATA]}/csit-vpp-perf-1707-all__17__output.xml"
+
+-
+  type: "input"  # Ignored in debug mode
+  general:
+    file-name: "robot-plugin.zip"
+    download-path: "{job}/{build}/robot/report/*zip*/{filename}"
+#    file-name: "output_perf_data.xml"
+#    download-file-name: "{job}/{build}/robot/report/{filename}"
+    extract: "output.xml"
+  builds:
+    csit-vpp-perf-1707-all:
+    - 9
+    - 10
+    - 13
+    - 14
+    - 15
+    - 16
+    - 17
+    - 18
+    - 19
+    - 21
+    - 22
+    csit-vpp-functional-1707-ubuntu1604-virl:
+    - lastSuccessfulBuild
+    hc2vpp-csit-perf-master-ubuntu1604:
+    - 8
+    - 9
+    hc2vpp-csit-integration-1707-ubuntu1604:
+    - lastSuccessfulBuild
+    csit-nsh_sfc-verify-func-1707-ubuntu1604-virl:
+    - 2
+    csit-vpp-perf-1704-all:
+    - 6
+    - 7
+    - 8
+    - 9
+    - 10
+    - 12
+    - 14
+    - 15
+    - 16
+    - 17
+    csit-dpdk-perf-1704-all:
+    - 1
+    - 2
+    - 3
+    - 4
+    - 6
+    - 7
+    - 8
+    - 9
+    - 10
+    - 11
+
+-
+  type: "output"
+  format:
+    html:
+    - full
+    pdf:
+    - full
+    - marketing
+
+-
+# Example
+  type: "table"
+  title: "Performance improvments"
+  algoritm: "performance-improvements"
+  template: "templates/tmpl_performance_improvements.csv"
+  output-file-format: "csv"
+  output-file: "{DIR[WORKING]}/path/to/my_table.csv"
+  columns:
+  -
+    title: "VPP Functionality"
+    data: "template 2"
+  -
+    title: "Test Name"
+    data: "template 3"
+  -
+    title: "VPP-17.04 mean [Mpps]"
+    data: "vpp 1704 performance mean"
+  -
+    title: "VPP-17.07 mean [Mpps]"
+    data: "vpp 1707 performance mean"
+  -
+    title: "VPP-17.07 stdev [Mpps]"
+    data: "vpp 1707 performance stdev"
+  -
+    title: "17.04 to 17.07 change"
+    data: "change-relative 4 5"
+  rows: "generated"
+  data:
+    csit-vpp-perf-1707-all:
+    - 13
+    - 16
+    - 17
+  # Keep this formatting, the filter is enclosed with " (quotation mark) and
+  # each tag is enclosed with ' (apostrophe).
+  filter: "'64B' and '1T1C' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST'"
+  parameters:
+  - "throughput"
+
+-
+  type: "plot"
+  output-file-type: "html"
+  output-file: "{DIR[WORKING]}/path/to/my_plot.html"
+  plot-type: "performance-box"   # box, line
+  plot-title: "plot title"
+  x-axis: "x-axis title"
+  y-axis: "y-axis title"
+  data:
+    csit-vpp-perf-1707-all:
+    - 9
+    - 10
+    - 13
+    - 14
+    - 15
+    - 16
+    - 17
+    - 18
+    - 19
+    - 21
+  filter:
+    - "'64B' and '1T1C' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST'"
diff --git a/resources/tools/presentation/data.py b/resources/tools/presentation/data.py
new file mode 100644 (file)
index 0000000..e6de035
--- /dev/null
@@ -0,0 +1,768 @@
+# Copyright (c) 2017 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Data pre-processing
+
+- extract data from output.xml files generated by Jenkins jobs and store in
+  pandas' Series,
+- provide access to the data.
+"""
+
+import re
+import pandas as pd
+import logging
+
+from robot.api import ExecutionResult, ResultVisitor
+
+from errors import PresentationError
+
+
+class ExecutionChecker(ResultVisitor):
+    """Class to traverse through the test suite structure.
+
+    The functionality implemented in this class generates a JSON structure:
+
+    {
+        "metadata": {  # Optional
+            "version": "VPP version",
+            "job": "Jenkins job name"
+            "build": "Information about the build"
+        },
+        "suites": {
+            "Suite name 1": {
+                "doc": "Suite 1 documentation"
+            },
+            "Suite name N": {
+                "doc": "Suite N documentation"
+            }
+        },
+        "tests": {
+            "ID": {
+                "name": "Test name",
+                "parent": "Name of the parent of the test",
+                "tags": ["tag 1", "tag 2", "tag n"],
+                "type": "PDR" | "NDR",
+                "throughput": {
+                    "value": int,
+                    "unit": "pps" | "bps" | "percentage"
+                },
+                "latency": {
+                    "direction1": {
+                        "100": {
+                            "min": int,
+                            "avg": int,
+                            "max": int
+                        },
+                        "50": {  # Only for NDR
+                            "min": int,
+                            "avg": int,
+                            "max": int
+                        },
+                        "10": {  # Only for NDR
+                            "min": int,
+                            "avg": int,
+                            "max": int
+                        }
+                    },
+                    "direction2": {
+                        "100": {
+                            "min": int,
+                            "avg": int,
+                            "max": int
+                        },
+                        "50": {  # Only for NDR
+                            "min": int,
+                            "avg": int,
+                            "max": int
+                        },
+                        "10": {  # Only for NDR
+                            "min": int,
+                            "avg": int,
+                            "max": int
+                        }
+                    }
+                },
+                "lossTolerance": "lossTolerance",  # Only for PDR
+                "vat-history": {
+                    "DUT1": " DUT1 VAT History",
+                    "DUT2": " DUT2 VAT History"
+                },
+                "show-run": "Show Run"
+            },
+            "ID" {
+                # next test
+            }
+        }
+    }
+
+    .. note:: ID is the lowercase full path to the test.
+    """
+
+    REGEX_RATE = re.compile(r'^[\D\d]*FINAL_RATE:\s(\d+\.\d+)\s(\w+)')
+
+    REGEX_LAT_NDR = re.compile(r'^[\D\d]*'
+                               r'LAT_\d+%NDR:\s\[\'(-?\d+\/-?\d+/-?\d+)\','
+                               r'\s\'(-?\d+/-?\d+/-?\d+)\'\]\s\n'
+                               r'LAT_\d+%NDR:\s\[\'(-?\d+/-?\d+/-?\d+)\','
+                               r'\s\'(-?\d+/-?\d+/-?\d+)\'\]\s\n'
+                               r'LAT_\d+%NDR:\s\[\'(-?\d+/-?\d+/-?\d+)\','
+                               r'\s\'(-?\d+/-?\d+/-?\d+)\'\]')
+
+    REGEX_LAT_PDR = re.compile(r'^[\D\d]*'
+                               r'LAT_\d+%PDR:\s\[\'(-?\d+/-?\d+/-?\d+)\','
+                               r'\s\'(-?\d+/-?\d+/-?\d+)\'\][\D\d]*')
+
+    REGEX_TOLERANCE = re.compile(r'^[\D\d]*LOSS_ACCEPTANCE:\s(\d*\.\d*)\s'
+                                 r'[\D\d]*')
+
+    REGEX_VERSION = re.compile(r"(stdout: 'vat# vat# Version:)(\s*)(.*)")
+
+    def __init__(self, **metadata):
+        """Initialisation.
+
+        :param metadata: Key-value pairs to be included to "metadata" part of
+        JSON structure.
+        :type metadata: dict
+        """
+
+        # Type of message to parse out from the test messages
+        self._msg_type = None
+
+        # VPP version
+        self._version = None
+
+        # Number of VAT History messages found:
+        # 0 - no message
+        # 1 - VAT History of DUT1
+        # 2 - VAT History of DUT2
+        self._vat_history_lookup_nr = 0
+
+        # Number of Show Running messages found
+        # 0 - no message
+        # 1 - Show run message found
+        self._show_run_lookup_nr = 0
+
+        # Test ID of the currently processed test - the lowercase full path
+        # to the test.
+        self._test_ID = None
+
+        # The main data structure
+        self._data = {
+            "metadata": {
+            },
+            "suites": {
+            },
+            "tests": {
+            }
+        }
+
+        # Save the provided metadata
+        for key, val in metadata.items():
+            self._data["metadata"][key] = val
+
+        # Dictionary defining the methods used to parse different types of
+        # messages
+        self.parse_msg = {
+            "setup-version": self._get_version,
+            "teardown-vat-history": self._get_vat_history,
+            "teardown-show-runtime": self._get_show_run
+        }
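+
+        # Illustrative example of the dispatch above: while _msg_type is
+        # "setup-version", start_message() routes each message to
+        # self._get_version(msg).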
+
+    @property
+    def data(self):
+        """Getter - Data parsed from the XML file.
+
+        :returns: Data parsed from the XML file.
+        :rtype: dict
+        """
+        return self._data
+
+    def _get_version(self, msg):
+        """Called when extraction of VPP version is required.
+
+        :param msg: Message to process.
+        :type msg: Message
+        :returns: Nothing.
+        """
+
+        if msg.message.count("stdout: 'vat# vat# Version:"):
+            self._version = str(re.search(self.REGEX_VERSION, msg.message).
+                                group(3))
+            self._data["metadata"]["version"] = self._version
+            self._msg_type = None
+
+            logging.debug("    VPP version: {0}".format(self._version))
+
+    def _get_vat_history(self, msg):
+        """Called when extraction of VAT command history is required.
+
+        :param msg: Message to process.
+        :type msg: Message
+        :returns: Nothing.
+        """
+        if msg.message.count("VAT command history:"):
+            self._vat_history_lookup_nr += 1
+            text = re.sub(r"[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3} "
+                          r"VAT command history:", "", msg.message, count=1).\
+                replace('"', "'")
+
+            if self._vat_history_lookup_nr == 1:
+                self._data["tests"][self._test_ID]["vat-history"] = dict()
+                self._data["tests"][self._test_ID]["vat-history"]["DUT1"] = text
+            elif self._vat_history_lookup_nr == 2:
+                self._data["tests"][self._test_ID]["vat-history"]["DUT2"] = text
+            self._msg_type = None
+
+    def _get_show_run(self, msg):
+        """Called when extraction of VPP operational data (output of CLI command
+        Show Runtime) is required.
+
+        :param msg: Message to process.
+        :type msg: Message
+        :returns: Nothing.
+        """
+        if msg.message.count("vat# Thread "):
+            self._show_run_lookup_nr += 1
+            text = msg.message.replace("vat# ", "").\
+                replace("return STDOUT ", "").replace('"', "'")
+            if self._show_run_lookup_nr == 1:
+                self._data["tests"][self._test_ID]["show-run"] = text
+            self._msg_type = None
+
+    def _get_latency(self, msg, test_type):
+        """Get the latency data from the test message.
+
+        :param msg: Message to be parsed.
+        :param test_type: Type of the test - NDR or PDR.
+        :type msg: str
+        :type test_type: str
+        :returns: Latencies parsed from the message.
+        :rtype: dict
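+
+        Expected message fragment (illustrative, derived from REGEX_LAT_PDR):
+            LAT_50%PDR: ['10/15/30', '12/18/35']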
+        """
+
+        if test_type == "NDR":
+            groups = re.search(self.REGEX_LAT_NDR, msg)
+            groups_range = range(1, 7)
+        elif test_type == "PDR":
+            groups = re.search(self.REGEX_LAT_PDR, msg)
+            groups_range = range(1, 3)
+        else:
+            return {}
+
+        latencies = list()
+        for idx in groups_range:
+            try:
+                lat = [int(item) for item in str(groups.group(idx)).split('/')]
+            except (AttributeError, ValueError):
+                lat = [-1, -1, -1]
+            latencies.append(lat)
+
+        keys = ("min", "avg", "max")
+        latency = {
+            "direction1": {
+            },
+            "direction2": {
+            }
+        }
+
+        latency["direction1"]["100"] = dict(zip(keys, latencies[0]))
+        latency["direction2"]["100"] = dict(zip(keys, latencies[1]))
+        if test_type == "NDR":
+            latency["direction1"]["50"] = dict(zip(keys, latencies[2]))
+            latency["direction2"]["50"] = dict(zip(keys, latencies[3]))
+            latency["direction1"]["10"] = dict(zip(keys, latencies[4]))
+            latency["direction2"]["10"] = dict(zip(keys, latencies[5]))
+
+        return latency
+
+    def visit_suite(self, suite):
+        """Implements traversing through the suite and its direct children.
+
+        :param suite: Suite to process.
+        :type suite: Suite
+        :returns: Nothing.
+        """
+        if self.start_suite(suite) is not False:
+            suite.suites.visit(self)
+            suite.tests.visit(self)
+            self.end_suite(suite)
+
+    def start_suite(self, suite):
+        """Called when suite starts.
+
+        :param suite: Suite to process.
+        :type suite: Suite
+        :returns: Nothing.
+        """
+
+        suite_name = suite.name.lower().replace('"', "'")
+        self._data["suites"][suite_name] = \
+            {"doc": suite.doc.replace('"', "'").replace('\n', ' ').
+                    replace('\r', '').replace('*[', '\n *[')}
+
+        suite.keywords.visit(self)
+
+    def end_suite(self, suite):
+        """Called when suite ends.
+
+        :param suite: Suite to process.
+        :type suite: Suite
+        :returns: Nothing.
+        """
+        pass
+
+    def visit_test(self, test):
+        """Implements traversing through the test.
+
+        :param test: Test to process.
+        :type test: Test
+        :returns: Nothing.
+        """
+        if self.start_test(test) is not False:
+            test.keywords.visit(self)
+            self.end_test(test)
+
+    def start_test(self, test):
+        """Called when test starts.
+
+        :param test: Test to process.
+        :type test: Test
+        :returns: Nothing.
+        """
+
+        tags = [str(tag) for tag in test.tags]
+        if test.status == "PASS" and "NDRPDRDISC" in tags:
+
+            if "NDRDISC" in tags:
+                test_type = "NDR"
+            elif "PDRDISC" in tags:
+                test_type = "PDR"
+            else:
+                return
+
+            try:
+                rate_value = str(re.search(
+                    self.REGEX_RATE, test.message).group(1))
+            except AttributeError:
+                rate_value = "-1"
+            try:
+                rate_unit = str(re.search(
+                    self.REGEX_RATE, test.message).group(2))
+            except AttributeError:
+                rate_unit = "-1"
+
+            test_result = dict()
+            test_result["name"] = test.name.lower()
+            test_result["parent"] = test.parent.name.lower()
+            test_result["tags"] = tags
+            test_result["type"] = test_type
+            test_result["throughput"] = dict()
+            test_result["throughput"]["value"] = int(rate_value.split('.')[0])
+            test_result["throughput"]["unit"] = rate_unit
+            test_result["latency"] = self._get_latency(test.message, test_type)
+            if test_type == "PDR":
+                test_result["lossTolerance"] = str(re.search(
+                    self.REGEX_TOLERANCE, test.message).group(1))
+
+            self._test_ID = test.longname.lower()
+
+            self._data["tests"][self._test_ID] = test_result
+
+    def end_test(self, test):
+        """Called when test ends.
+
+        :param test: Test to process.
+        :type test: Test
+        :returns: Nothing.
+        """
+        pass
+
+    def visit_keyword(self, keyword):
+        """Implements traversing through the keyword and its child keywords.
+
+        :param keyword: Keyword to process.
+        :type keyword: Keyword
+        :returns: Nothing.
+        """
+        if self.start_keyword(keyword) is not False:
+            self.end_keyword(keyword)
+
+    def start_keyword(self, keyword):
+        """Called when keyword starts. Default implementation does nothing.
+
+        :param keyword: Keyword to process.
+        :type keyword: Keyword
+        :returns: Nothing.
+        """
+        try:
+            if keyword.type == "setup":
+                self.visit_setup_kw(keyword)
+            elif keyword.type == "teardown":
+                self.visit_teardown_kw(keyword)
+        except AttributeError:
+            pass
+
+    def end_keyword(self, keyword):
+        """Called when keyword ends. Default implementation does nothing.
+
+        :param keyword: Keyword to process.
+        :type keyword: Keyword
+        :returns: Nothing.
+        """
+        pass
+
+    def visit_setup_kw(self, setup_kw):
+        """Implements traversing through the teardown keyword and its child
+        keywords.
+
+        :param setup_kw: Keyword to process.
+        :type setup_kw: Keyword
+        :returns: Nothing.
+        """
+        for keyword in setup_kw.keywords:
+            if self.start_setup_kw(keyword) is not False:
+                self.visit_setup_kw(keyword)
+                self.end_setup_kw(keyword)
+
+    def start_setup_kw(self, setup_kw):
+        """Called when teardown keyword starts. Default implementation does
+        nothing.
+
+        :param setup_kw: Keyword to process.
+        :type setup_kw: Keyword
+        :returns: Nothing.
+        """
+        if setup_kw.name.count("Vpp Show Version Verbose") \
+                and not self._version:
+            self._msg_type = "setup-version"
+            setup_kw.messages.visit(self)
+
+    def end_setup_kw(self, setup_kw):
+        """Called when keyword ends. Default implementation does nothing.
+
+        :param setup_kw: Keyword to process.
+        :type setup_kw: Keyword
+        :returns: Nothing.
+        """
+        pass
+
+    def visit_teardown_kw(self, teardown_kw):
+        """Implements traversing through the teardown keyword and its child
+        keywords.
+
+        :param teardown_kw: Keyword to process.
+        :type teardown_kw: Keyword
+        :returns: Nothing.
+        """
+        for keyword in teardown_kw.keywords:
+            if self.start_teardown_kw(keyword) is not False:
+                self.visit_teardown_kw(keyword)
+                self.end_teardown_kw(keyword)
+
+    def start_teardown_kw(self, teardown_kw):
+        """Called when teardown keyword starts. Default implementation does
+        nothing.
+
+        :param teardown_kw: Keyword to process.
+        :type teardown_kw: Keyword
+        :returns: Nothing.
+        """
+
+        if teardown_kw.name.count("Show Vat History On All Duts"):
+            self._vat_history_lookup_nr = 0
+            self._msg_type = "teardown-vat-history"
+        elif teardown_kw.name.count("Vpp Show Runtime"):
+            self._show_run_lookup_nr = 0
+            self._msg_type = "teardown-show-runtime"
+
+        if self._msg_type:
+            teardown_kw.messages.visit(self)
+
+    def end_teardown_kw(self, teardown_kw):
+        """Called when keyword ends. Default implementation does nothing.
+
+        :param teardown_kw: Keyword to process.
+        :type teardown_kw: Keyword
+        :returns: Nothing.
+        """
+        pass
+
+    def visit_message(self, msg):
+        """Implements visiting the message.
+
+        :param msg: Message to process.
+        :type msg: Message
+        :returns: Nothing.
+        """
+        if self.start_message(msg) is not False:
+            self.end_message(msg)
+
+    def start_message(self, msg):
+        """Called when message starts. Get required information from messages:
+        - VPP version.
+
+        :param msg: Message to process.
+        :type msg: Message
+        :returns: Nothing.
+        """
+
+        if self._msg_type:
+            self.parse_msg[self._msg_type](msg)
+
+    def end_message(self, msg):
+        """Called when message ends. Default implementation does nothing.
+
+        :param msg: Message to process.
+        :type msg: Message
+        :returns: Nothing.
+        """
+        pass
+
+
+class InputData(object):
+    """Input data
+
+    The data is extracted from output.xml files generated by Jenkins jobs and
+    stored in pandas' DataFrames.
+
+    The data structure:
+    - job name
+      - build number
+        - metadata
+          - job
+          - build
+          - vpp version
+        - suites
+        - tests
+          - ID: test data (as described in ExecutionChecker documentation)
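+
+    Example of accessing the parsed data (illustrative; job and build taken
+    from the example configuration, the test ID is a placeholder):
+        tests = input_data.tests("csit-vpp-perf-1707-all", "13")
+        throughput = tests["<test ID>"]["throughput"]["value"]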
+    """
+
+    def __init__(self, config):
+        """Initialization.
+        """
+
+        # Configuration:
+        self._cfg = config
+
+        # Data store:
+        self._input_data = None
+
+    @property
+    def data(self):
+        """Getter - Input data.
+
+        :returns: Input data
+        :rtype: pandas.Series
+        """
+        return self._input_data
+
+    def metadata(self, job, build):
+        """Getter - metadata
+
+        :param job: Job which metadata we want.
+        :param build: Build which metadata we want.
+        :type job: str
+        :type build: str
+        :returns: Metadata
+        :rtype: pandas.Series
+        """
+
+        return self.data[job][build]["metadata"]
+
+    def suites(self, job, build):
+        """Getter - suites
+
+        :param job: Job which suites we want.
+        :param build: Build which suites we want.
+        :type job: str
+        :type build: str
+        :returns: Suites.
+        :rtype: pandas.Series
+        """
+
+        return self.data[job][build]["suites"]
+
+    def tests(self, job, build):
+        """Getter - tests
+
+        :param job: Job which tests we want.
+        :param build: Build which tests we want.
+        :type job: str
+        :type build: str
+        :returns: Tests.
+        :rtype: pandas.Series
+        """
+
+        return self.data[job][build]["tests"]
+
+    @staticmethod
+    def _parse_tests(job, build):
+        """Process data from robot output.xml file and return JSON structured
+        data.
+
+        :param job: The name of the job whose build output data will be
+        processed.
+        :param build: The build whose output data will be processed.
+        :type job: str
+        :type build: dict
+        :returns: JSON data structure.
+        :rtype: dict
+        """
+
+        with open(build["file-name"], 'r') as data_file:
+            result = ExecutionResult(data_file)
+        checker = ExecutionChecker(job=job, build=build)
+        result.visit(checker)
+
+        return checker.data
+
+    def parse_input_data(self):
+        """Parse input data from input files and store in pandas' Series.
+        """
+
+        logging.info("Parsing input files ...")
+
+        job_data = dict()
+        for job, builds in self._cfg.builds.items():
+            logging.info("  Extracting data from the job '{0}' ...'".
+                         format(job))
+            builds_data = dict()
+            for build in builds:
+                logging.info("    Extracting data from the build '{0}'".
+                             format(build["build"]))
+                logging.info("    Processing the file '{0}'".
+                             format(build["file-name"]))
+                data = InputData._parse_tests(job, build)
+
+                build_data = pd.Series({
+                    "metadata": pd.Series(data["metadata"].values(),
+                                          index=data["metadata"].keys()),
+                    "suites": pd.Series(data["suites"].values(),
+                                        index=data["suites"].keys()),
+                    "tests": pd.Series(data["tests"].values(),
+                                       index=data["tests"].keys()),
+                    })
+                builds_data[str(build["build"])] = build_data
+                logging.info("    Done.")
+
+            job_data[job] = pd.Series(builds_data.values(),
+                                      index=builds_data.keys())
+            logging.info("  Done.")
+
+        self._input_data = pd.Series(job_data.values(), index=job_data.keys())
+        logging.info("Done.")
+
+    @staticmethod
+    def _end_of_tag(tag_filter, start=0, closer="'"):
+        """Return the index of character in the string which is the end of tag.
+
+        :param tag_filter: The string where the end of tag is being searched.
+        :param start: The index where the searching is stated.
+        :param closer: The character which is the tag closer.
+        :type tag_filter: str
+        :type start: int
+        :type closer: str
+        :returns: The index of the tag closer.
+        :rtype: int
+        """
+
+        try:
+            idx_opener = tag_filter.index(closer, start)
+            return tag_filter.index(closer, idx_opener + 1)
+        except ValueError:
+            return None
+
+    @staticmethod
+    def _condition(tag_filter):
+        """Create a conditional statement from the given tag filter.
+
+        :param tag_filter: Filter based on tags from the element specification.
+        :type tag_filter: str
+        :returns: Conditional statement which can be evaluated.
+        :rtype: str
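+
+        Example (illustrative):
+            >>> InputData._condition("'64B' and '1T1C'")
+            "'64B' in tags and '1T1C' in tags"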
+        """
+
+        index = 0
+        while True:
+            index = InputData._end_of_tag(tag_filter, index)
+            if index is None:
+                return tag_filter
+            index += 1
+            tag_filter = tag_filter[:index] + " in tags" + tag_filter[index:]
+
+    def filter_tests_data(self, element, params=None):
+        """Filter required data from the given jobs and builds.
+
+        The output data structure is:
+
+        - job 1
+          - build 1
+            - test 1 ID:
+              - param 1
+              - param 2
+              ...
+              - param n
+            ...
+            - test n ID:
+            ...
+          ...
+          - build n
+        ...
+        - job n
+
+        :param element: Element which will use the filtered data.
+        :param params: Parameters which will be included in the output.
+        :type element: pandas.Series
+        :type params: list
+        :returns: Filtered data.
+        :rtype: pandas.Series
+        """
+
+        logging.info("  Creating the data set for the {0} '{1}'.".
+                     format(element["type"], element.get("title", "")))
+
+        cond = InputData._condition(element.get("filter", ""))
+        if cond:
+            logging.debug("  Filter: {0}".format(cond))
+        else:
+            logging.error("  No filter defined.")
+            return None
+
+        if params is None:
+            try:
+                params = element["parameters"]
+            except KeyError:
+                params = None
+
+        data = pd.Series()
+        try:
+            for job, builds in element["data"].items():
+                data[job] = pd.Series()
+                for build in builds:
+                    data[job][str(build)] = pd.Series()
+
+                    for test_ID, test_data in self.tests(job, str(build)).\
+                            iteritems():
+                        if eval(cond, {"tags": test_data["tags"]}):
+                            data[job][str(build)][test_ID] = pd.Series()
+                            if params is None:
+                                for param, val in test_data.items():
+                                    data[job][str(build)][test_ID][param] = val
+                            else:
+                                for param in params:
+                                    data[job][str(build)][test_ID][param] = \
+                                        test_data[param]
+            return data
+
+        except (KeyError, IndexError, ValueError) as err:
+            raise PresentationError("Missing mandatory parameter in the "
+                                    "element specification.", err)
diff --git a/resources/tools/presentation/environment.py b/resources/tools/presentation/environment.py
new file mode 100644 (file)
index 0000000..b57e52c
--- /dev/null
@@ -0,0 +1,153 @@
+# Copyright (c) 2017 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Environment
+
+Setting of the environment according to the configuration specified in the
+configuration YAML file.
+"""
+
+import os
+import shutil
+import logging
+
+from errors import PresentationError
+
+
+class Environment(object):
+    """Setting of the environment:
+    - set environment variables,
+    - create directories.
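+
+    Example (illustrative):
+        env = Environment(config.environment, force=True)
+        env.set_environment()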
+    """
+
+    def __init__(self, env, force=False):
+        """Initialization.
+
+        :param env: Environment specification.
+        :param force: If True, remove old build(s) if present.
+        :type env: dict
+        :type force: bool
+        """
+
+        self._env = env
+        self._force = force
+
+    @property
+    def environment(self):
+        """Getter.
+
+        :returns: Environment settings.
+        :rtype: dict
+        """
+        return self._env
+
+    def _set_environment_variables(self):
+        """Set environment variables.
+        """
+        logging.info("Setting the environment variables ...")
+        # logging.debug("Environment variables before:\n{}".format(os.environ))
+
+        count = 1
+
+        for section in ("configuration", "paths", "urls"):
+            for var, value in self._env[section].items():
+                logging.debug("  {:3d} Setting the variable {} = {}".
+                              format(count, var, value))
+                os.environ[var] = str(value)
+                count += 1
+
+        # logging.debug("Environment variables after:\n{}".format(os.environ))
+        logging.info("Done.")
+
+    def _make_dirs(self):
+        """Create the directories specified in the 'make-dirs' part of
+        'environment' section in the configuration file.
+
+        :raises: PresentationError if it is not possible to remove or create a
+        directory.
+        """
+
+        if self._force:
+            logging.info("Removing old build(s) ...")
+            for directory in self._env["build-dirs"]:
+                dir_to_remove = self._env["paths"][directory]
+                if os.path.isdir(dir_to_remove):
+                    try:
+                        shutil.rmtree(dir_to_remove)
+                        logging.info("  Removed: {}".format(dir_to_remove))
+                    except OSError:
+                        raise PresentationError("Cannot remove the directory "
+                                                "'{}'".format(dir_to_remove))
+            logging.info("Done.")
+
+        logging.info("Making directories ...")
+
+        for directory in self._env["make-dirs"]:
+            dir_to_make = self._env["paths"][directory]
+            try:
+                if os.path.isdir(dir_to_make):
+                    logging.warning("The directory '{}' exists, skipping.".
+                                    format(dir_to_make))
+                else:
+                    os.makedirs(dir_to_make)
+                    logging.info("  Created: {}".format(dir_to_make))
+            except OSError:
+                raise PresentationError("Cannot make the directory '{}'".
+                                        format(dir_to_make))
+
+        logging.info("Done.")
+
+    def set_environment(self):
+        """Set the environment.
+        """
+
+        self._set_environment_variables()
+        self._make_dirs()
+
+
+def clean_environment(env):
+    """Clean the environment.
+
+    :param env: Environment specification.
+    :type env: dict
+    :raises: PresentationError if it is not possible to remove a directory.
+    """
+
+    logging.info("Cleaning the environment ...")
+
+    for directory in env["remove-dirs"]:
+        dir_to_remove = env["paths"][directory]
+        logging.info("  Removing the working directory {} ...".
+                     format(dir_to_remove))
+        if os.path.isdir(dir_to_remove):
+            try:
+                shutil.rmtree(dir_to_remove)
+            except OSError:
+                raise PresentationError("Cannot remove the directory '{}'".
+                                        format(dir_to_remove))
+        else:
+            logging.warning("The directory '{}' does not exist.".
+                            format(dir_to_remove))
+
+    logging.info("Done.")
diff --git a/resources/tools/presentation/errors.py b/resources/tools/presentation/errors.py
new file mode 100644 (file)
index 0000000..0d8d5b9
--- /dev/null
@@ -0,0 +1,78 @@
+# Copyright (c) 2017 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Implementation of exceptions used in the Presentation and analytics layer.
+"""
+
+import sys
+import logging
+
+
+class PresentationError(Exception):
+    """Exception(s) raised by the presentation module.
+
+    When raising this exception, put this information to the message in this
+    order:
+     - short description of the encountered problem (parameter msg),
+     - relevant messages if there are any collected, e.g., from caught
+       exception (optional parameter details),
+     - relevant data if there are any collected (optional parameter details).
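+
+    Example (illustrative):
+        raise PresentationError("Cannot parse the input file.",
+                                details=str(err), level="ERROR")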
+    """
+
+    log_exception = {"DEBUG": logging.debug,
+                     "INFO": logging.info,
+                     "WARNING": logging.warning,
+                     "ERROR": logging.error,
+                     "CRITICAL": logging.critical}
+
+    def __init__(self, msg, details='', level="CRITICAL"):
+        """Sets the exception message and the level.
+
+        :param msg: Short description of the encountered problem.
+        :param details: Relevant messages if there are any collected, e.g.,
+        from caught exception (optional parameter details), or relevant data if
+        there are any collected (optional parameter details).
+        :param level: Level of the error, possible choices are: "DEBUG", "INFO",
+        "WARNING", "ERROR" and "CRITICAL".
+        :type msg: str
+        :type details: str
+        :type level: str
+        """
+
+        super(PresentationError, self).__init__()
+        self._msg = msg
+        self._details = details
+        self._level = level
+
+        try:
+            self.log_exception[self._level](self._msg)
+            if self._details:
+                self.log_exception[self._level](self._details)
+        except KeyError:
+            print("Wrong log level.")
+            sys.exit(1)
+
+    def __repr__(self):
+        return repr(self._msg)
+
+    def __str__(self):
+        return str(self._msg)
+
+    @property
+    def level(self):
+        """Getter - logging level.
+
+        :returns: Logging level.
+        :rtype: str
+        """
+        return self._level
diff --git a/resources/tools/presentation/inputs.py b/resources/tools/presentation/inputs.py
new file mode 100644 (file)
index 0000000..b3cb583
--- /dev/null
@@ -0,0 +1,189 @@
+# Copyright (c) 2017 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Inputs
+Download all data.
+"""
+
+import logging
+
+from os import rename, remove
+from os.path import join, getsize
+from zipfile import ZipFile, is_zipfile, BadZipfile
+from httplib import responses
+from requests import get, codes, RequestException, Timeout, TooManyRedirects, \
+    HTTPError, ConnectionError
+
+from errors import PresentationError
+
+
+# Chunk size used for file download
+CHUNK_SIZE = 512
+
+# Separator used in file names
+SEPARATOR = "__"
+
+
+def download_data_files(config):
+    """Download all data specified in the configuration file in the section
+    type: input --> builds.
+
+    :param config: Configuration.
+    :type config: Configuration
+    :raises: PresentationError if there is no url defined for the job.
+    """
+
+    for job, builds in config.builds.items():
+        for build in builds:
+            if job.startswith("csit-"):
+                url = config.environment["urls"]["URL[JENKINS,CSIT]"]
+            elif job.startswith("hc2vpp-"):
+                url = config.environment["urls"]["URL[JENKINS,HC]"]
+            else:
+                raise PresentationError("No url defined for the job '{}'.".
+                                        format(job))
+            file_name = config.input["file-name"]
+            full_name = config.input["download-path"].\
+                format(job=job, build=build["build"], filename=file_name)
+            url = "{0}/{1}".format(url, full_name)
+            new_name = join(
+                config.environment["paths"]["DIR[WORKING,DATA]"],
+                "{job}{sep}{build}{sep}{name}".format(job=job, sep=SEPARATOR,
+                                                      build=build["build"],
+                                                      name=file_name))
+
+            logging.info("Downloading the file '{0}' to '{1}'.".
+                         format(url, new_name))
+
+            status = "failed"
+            try:
+                response = get(url, stream=True)
+                code = response.status_code
+                if code != codes["OK"]:
+                    logging.error("{0}: {1}".format(code, responses[code]))
+                    config.set_input_state(job, build["build"], "not found")
+                    continue
+
+                # Use a context manager so the file is closed even if the
+                # download is interrupted by an exception.
+                with open(new_name, "wb") as file_handle:
+                    for chunk in response.iter_content(chunk_size=CHUNK_SIZE):
+                        if chunk:
+                            file_handle.write(chunk)
+
+                expected_length = int(response.headers["Content-Length"])
+                logging.debug("  Expected file size: {0}B".
+                              format(expected_length))
+                real_length = getsize(new_name)
+                logging.debug("  Downloaded size: {0}B".format(real_length))
+
+                if real_length == expected_length:
+                    status = "downloaded"
+                    logging.info("{0}: {1}".format(code, responses[code]))
+                else:
+                    logging.error("The file size differs from the expected "
+                                  "size.")
+            except ConnectionError as err:
+                logging.error("Not possible to connect to '{0}'.".format(url))
+                logging.debug(err)
+            except HTTPError as err:
+                logging.error("Invalid HTTP response from '{0}'.".format(url))
+                logging.debug(err)
+            except TooManyRedirects as err:
+                logging.error("Request exceeded the configured number "
+                              "of maximum re-directions.")
+                logging.debug(err)
+            except Timeout as err:
+                logging.error("Request timed out.")
+                logging.debug(err)
+            except RequestException as err:
+                logging.error("Unexpected HTTP request exception.")
+                logging.debug(err)
+            except (IOError, ValueError, KeyError) as err:
+                logging.error("Download failed.")
+                logging.debug("Reason: {0}".format(err))
+
+            config.set_input_state(job, build["build"], status)
+            config.set_input_file_name(job, build["build"], new_name)
+
+            if status == "failed":
+                logging.info("Removing the file '{0}'".format(new_name))
+                try:
+                    remove(new_name)
+                except OSError as err:
+                    logging.warning(str(err))
+                config.set_input_file_name(job, build["build"], None)
+
+    unzip_files(config)
+
+
+def unzip_files(config):
+    """Unzip downloaded zip files
+
+    :param config: Configuration.
+    :type config: Configuration
+    :raises: PresentationError if the zip file does not exist or it is not a
+    zip file.
+    """
+
+    if config.is_debug:
+        data_file = config.debug["extract"]
+    else:
+        data_file = config.input["extract"]
+
+    for job, builds in config.builds.items():
+        for build in builds:
+            try:
+                status = "failed"
+                file_name = build["file-name"]
+                directory = config.environment["paths"]["DIR[WORKING,DATA]"]
+                if build["status"] == "downloaded" and is_zipfile(file_name):
+                    logging.info("Unziping: '{0}' from '{1}'.".
+                                 format(data_file, file_name))
+                    new_name = "{0}{1}{2}".format(file_name.rsplit('.')[-2],
+                                                  SEPARATOR, data_file)
+                    try:
+                        with ZipFile(file_name, 'r') as zip_file:
+                            zip_file.extract(data_file, directory)
+                        logging.info("Renaming the file '{0}' to '{1}'".
+                                     format(data_file, new_name))
+                        rename(join(directory, data_file), new_name)
+                        status = "unzipped"
+                        config.set_input_state(job, build["build"], status)
+                        config.set_input_file_name(job, build["build"],
+                                                   new_name)
+                    except (BadZipfile, RuntimeError) as err:
+                        logging.error("Failed to unzip the file '{0}': {1}.".
+                                      format(file_name, str(err)))
+                    except OSError as err:
+                        logging.error("Failed to rename the file '{0}': {1}.".
+                                      format(data_file, str(err)))
+                    finally:
+                        logging.info("Removing the file '{0}'".
+                                     format(file_name))
+                        try:
+                            if not config.is_debug:
+                                remove(file_name)
+                        except OSError as err:
+                            logging.warning(str(err))
+                        if status == "failed":
+                            config.set_input_file_name(job, build["build"],
+                                                       None)
+                else:
+                    raise PresentationError("The file '{0}' does not exist or "
+                                            "it is not a zip file".
+                                            format(file_name))
+
+                config.set_input_state(job, build["build"], status)
+
+            except KeyError:
+                pass
diff --git a/resources/tools/presentation/lld.rst b/resources/tools/presentation/lld.rst
new file mode 100644 (file)
index 0000000..40ac2f5
--- /dev/null
@@ -0,0 +1,723 @@
+===================================================
+Presentation and Analytics Layer - Low Level Design
+===================================================
+
+Table of contents
+-----------------
+
+.. toctree::
+   :maxdepth: 3
+
+
+Overview
+--------
+
+The presentation and analytics layer (PAL) is the fourth layer of the CSIT
+hierarchy. The model of the presentation and analytics layer consists of four
+sub-layers, from bottom to top:
+
+ - sL1 - Data - input data to be processed:
+
+   - Static content - .rst text files, .svg static figures, and other files
+     stored in the CSIT git repository.
+   - Data to process - .xml files generated by Jenkins jobs executing tests,
+     stored as robot results files (output.xml).
+   - Specification - .yaml file with the models of the report elements (tables,
+     plots, layout, ...) generated by this tool. It also contains the
+     configuration of the tool and the specification of input data (jobs and
+     builds).
+
+ - sL2 - Data processing
+
+   - The data are read from the specified input files (.xml) and stored as
+     multi-indexed `pandas.Series <https://pandas.pydata.org/pandas-docs/stable/
+     generated/pandas.Series.html>`_.
+   - This layer also provides an interface to the input data and the filtering
+     of the input data.
+
+ - sL3 - Data presentation - This layer generates the elements specified in the
+   specification file:
+
+   - Tables: .csv files linked to static .rst files
+   - Plots: .html files generated using plot.ly linked to static .rst files
+
+ - sL4 - Report generation - Sphinx generates required formats and versions:
+
+   - formats: html, pdf
+   - versions: minimal, full (TODO: define the names and scope of versions)
+
+
+Data
+----
+
+Report Specification
+````````````````````
+
+The report specification file defines which data is used and which outputs are
+generated. It is human readable and structured. It is easy to add / remove /
+change items. The specification includes:
+
+ - Specification of the environment
+ - Configuration of debug mode (optional)
+ - Specification of input data (jobs, builds, files, ...)
+ - Specification of the output
+ - What and how is generated:
+
+   - What: plots, tables
+   - How: specification of all properties and parameters
+
+ - .yaml format
+
+Structure of the specification file
+'''''''''''''''''''''''''''''''''''
+
+The specification file is organized as a list of dictionaries distinguished by
+the type:
+
+ | -
+ |   type: "environment"
+ |
+ | -
+ |   type: "debug"
+ |
+ | -
+ |   type: "input"
+ |
+ | -
+ |   type: "output"
+ |
+ | -
+ |   type: "table"
+ |
+ | -
+ |   type: "plot"
+
+Each type represents a section. The sections "environment", "debug", "input"
+and "output" appear only once in the specification; "table" and "plot" can
+appear multiple times.
+
+Sections "debug", "table" and "plot" are optional.
+
+Table(s) and plot(s) are referred to as "elements" in this text. It is
+possible to define and implement other elements if needed.
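+
+Since the specification is plain YAML, reading it and grouping the sections by
+their "type" key is straightforward. A minimal sketch, assuming PyYAML is
+available (file name and keys follow the examples in this document)::
+
+    from yaml import safe_load
+
+    with open("configuration.yaml") as spec_file:
+        sections = safe_load(spec_file)
+
+    # Sections appearing once vs. repeatable element sections.
+    environment = next(s for s in sections if s["type"] == "environment")
+    tables = [s for s in sections if s["type"] == "table"]
+    plots = [s for s in sections if s["type"] == "plot"]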
+
+
+Section: Environment
+''''''''''''''''''''
+
+This section has these parts:
+
+ - type: "environment" - says that this is the section "environment"
+ - configuration - configuration of the PAL
+ - paths - paths used by the PAL
+ - urls - urls pointing to the data sources
+ - make-dirs - a list of the directories to be created by the PAL while
+   preparing the environment
+ - remove-dirs - a list of the directories to be removed while cleaning the
+   environment
+ - build-dirs - a list of the directories where the results are stored
+
+The structure of the section "Environment" is as follows (example):
+
+ | -
+ |   type: "environment"
+ |   configuration:
+ |     # Debug mode:
+ |     # If the section "type: debug" is missing, CFG[DEBUG] is set to 0.
+ |     CFG[DEBUG]: 1
+ |
+ |   paths:
+ |     DIR[WORKING]: "_tmp"
+ |     DIR[BUILD,HTML]: "_build"
+ |     DIR[BUILD,LATEX]: "_build_latex"
+ |     DIR[RST]: "../../../docs/report"
+ |
+ |     DIR[WORKING,DATA]: "{DIR[WORKING]}/data"
+ |
+ |     DIR[STATIC]: "{DIR[BUILD,HTML]}/_static"
+ |     DIR[STATIC,VPP]: "{DIR[STATIC]}/vpp"
+ |     DIR[STATIC,DPDK]: "{DIR[STATIC]}/dpdk"
+ |     DIR[STATIC,ARCH]: "{DIR[STATIC]}/archive"
+ |     DIR[STATIC,TREND]: "{DIR[STATIC]}/trending"
+ |
+ |     DIR[PLOT,VPP]: "{DIR[WORKING]}/vpp_plot"
+ |     DIR[PLOT,DPDK]: "{DIR[WORKING]}/dpdk_plot"
+ |
+ |     DIR[DTR]: "{DIR[RST]}/detailed_test_results"
+ |     DIR[DTR,PERF,DPDK]: "{DIR[DTR]}/dpdk_performance_results"
+ |     DIR[DTR,PERF,VPP]: "{DIR[DTR]}/vpp_performance_results"
+ |     DIR[DTR,PERF,HC]: "{DIR[DTR]}/honeycomb_performance_results"
+ |     DIR[DTR,FUNC,VPP]: "{DIR[DTR]}/vpp_functional_results"
+ |     DIR[DTR,FUNC,HC]: "{DIR[DTR]}/honeycomb_functional_results"
+ |     DIR[DTR,FUNC,NSHSFC]: "{DIR[DTR]}/nshsfc_functional_results"
+ |     DIR[DTR,PERF,VPP,IMPRV]: "{DIR[RST]}/vpp_performance_tests/performance_improvements"
+ |
+ |     DIR[DTC]: "{DIR[RST]}/test_configuration"
+ |     DIR[DTC,PERF,VPP]: "{DIR[DTC]}/vpp_performance_configuration"
+ |     DIR[DTC,FUNC,VPP]: "{DIR[DTC]}/vpp_functional_configuration"
+ |
+ |     DIR[DTO]: "{DIR[RST]}/test_operational_data"
+ |     DIR[DTO,PERF,VPP]: "{DIR[DTO]}/vpp_performance_operational_data"
+ |
+ |     DIR[CSS_PATCH_FILE]: "{DIR[STATIC]}/theme_overrides.css"
+ |
+ |   urls:
+ |     URL[JENKINS,CSIT]: "https://jenkins.fd.io/view/csit/job"
+ |     URL[JENKINS,HC]: "https://jenkins.fd.io/view/hc2vpp/job"
+ |
+ |   make-dirs:
+ |   # List the directories which are created while preparing the environment.
+ |   # All directories MUST be defined in "paths" section.
+ |   - "DIR[WORKING,DATA]"
+ |   - "DIR[STATIC,VPP]"
+ |   - "DIR[STATIC,DPDK]"
+ |   - "DIR[STATIC,ARCH]"
+ |   - "DIR[STATIC,TREND]"
+ |   - "DIR[PLOT,VPP]"
+ |   - "DIR[PLOT,DPDK]"
+ |   - "DIR[BUILD,LATEX]"
+ |
+ |   remove-dirs:
+ |   # List the directories which are deleted while cleaning the environment.
+ |   # All directories MUST be defined in "paths" section.
+ |   - "DIR[WORKING]"
+ |
+ |   build-dirs:
+ |   # List the directories where the results (build) is stored.
+ |   # All directories MUST be defined in "paths" section.
+ |   - "DIR[BUILD,HTML]"
+ |   - "DIR[BUILD,LATEX]"
+
+It is possible to use defined items in the definition of other items, e.g.:
+
+ | DIR[WORKING,DATA]: "{DIR[WORKING]}/data"
+
+will be automatically changed to
+
+ | DIR[WORKING,DATA]: "_tmp/data"
+
+
+Section: Debug mode
+'''''''''''''''''''
+
+This section is optional; it configures the debug mode. It is used when we do
+not want to download the data files and want to use local files instead.
+
+If the debug mode is configured, the "input" section is ignored.
+
+This section has these parts:
+
+ - type: "debug" - says that this is the section "debug"
+ - general
+
+   - input-format - xml or zip
+   - extract - if "zip" is defined as the input format, this file is extracted
+     from the zip file, otherwise this parameter is ignored
+
+ - builds - list of builds whose data are used. The job name is the key; its
+   value is a list of builds with their output files.
+
+The structure of the section "Debug" is as follows (example):
+
+ | -
+ |   type: "debug"
+ |   general:
+ |     input-format: "xml"  # zip or xml
+ |     extract: "output.xml"  # Only for zip
+ |   builds:
+ |     # The files must be in the directory DIR[WORKING,DATA]
+ |     csit-vpp-perf-1707-all:
+ |     -
+ |       build: 17
+ |       file: "{DIR[WORKING,DATA]}/csit-vpp-perf-1707-all__17__output.xml"
+
+
+Section: Input
+''''''''''''''
+
+This section is mandatory if the debug mode is not used, and defines the data
+which will be used to generate elements.
+
+This section has these parts:
+
+ - type: "input" - says that this section is the "input"
+ - general - parameters common to all builds:
+
+   - file-name: file to be downloaded
+   - download-path: path to be added to url pointing to the file, e.g.:
+     "{job}/{build}/robot/report/*zip*/{filename}"; {job}, {build} and
+     {filename} are replaced by proper values defined in this section
+   - extract: file to be extracted from the downloaded zip file, e.g.:
+     "output.xml"; if an .xml file is downloaded, this parameter is ignored.
+
+ - builds - list of jobs (keys) and builds whose output data will be downloaded
+
+The structure of the section "Input" is as follows (example from 17.07 report):
+
+ | -
+ |   type: "input"  # Ignored in the debug mode
+ |   general:
+ |     file-name: "robot-plugin.zip"
+ |     download-path: "{job}/{build}/robot/report/*zip*/{filename}"
+ |     extract: "output.xml"
+ |   builds:
+ |     csit-vpp-perf-1707-all:
+ |     - 9
+ |     - 10
+ |     - 13
+ |     - 14
+ |     - 15
+ |     - 16
+ |     - 17
+ |     - 18
+ |     - 19
+ |     - 21
+ |     - 22
+ |     csit-dpdk-perf-1704-all:
+ |     - 1
+ |     - 2
+ |     - 3
+ |     - 4
+ |     - 5
+ |     - 6
+ |     - 7
+ |     - 8
+ |     - 9
+ |     - 10
+ |     csit-vpp-functional-1707-ubuntu1604-virl:
+ |     - lastSuccessfulBuild
+ |     hc2vpp-csit-perf-master-ubuntu1604:
+ |     - 8
+ |     - 9
+ |     hc2vpp-csit-integration-1707-ubuntu1604:
+ |     - lastSuccessfulBuild
+ |     csit-nsh_sfc-verify-func-1707-ubuntu1604-virl:
+ |     - 2
+ |     csit-vpp-perf-1704-all:
+ |     - 6
+ |     - 7
+ |     - 8
+ |     - 9
+ |     - 10
+ |     - 12
+ |     - 14
+ |     - 15
+ |     - 16
+ |     - 17
+ |     csit-dpdk-perf-1704-all:
+ |     - 1
+ |     - 2
+ |     - 3
+ |     - 4
+ |     - 6
+ |     - 7
+ |     - 8
+ |     - 9
+ |     - 10
+ |     - 11
+
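+For example, with the general parameters above and URL[JENKINS,CSIT] from the
+"environment" section, the data for build 9 of the job
+"csit-vpp-perf-1707-all" is downloaded from:
+
+ | https://jenkins.fd.io/view/csit/job/csit-vpp-perf-1707-all/9/robot/report/*zip*/robot-plugin.zip
+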
+
+Section: Output
+'''''''''''''''
+
+This section specifies which format(s) will be generated (html, pdf) and which
+versions will be generated for each format.
+
+This section has these parts:
+
+ - type: "output" - says that this section is the "output"
+ - format: html or pdf
+ - version: defined for each format separately
+
+The structure of the section "Output" is as follows (example):
+
+ | -
+ |   type: "output"
+ |   format:
+ |     html:
+ |     - full
+ |     pdf:
+ |     - full
+ |     - minimal
+
+TODO: define the names of versions
+
+
+Content of "minimal" version
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+TODO: define the name and content of this version
+
+
+Section: Table
+''''''''''''''
+
+This section defines a table to be generated. There can be 0 or more "table"
+sections.
+
+This section has these parts:
+
+ - type: "table" - says that this section defines a table
+ - algorithm: Algorithm which is used to generate the table. The other
+   parameters in this section must provide all information needed by the used
+   algorithm.
+ - template: (optional) a .csv file used as a template while generating the
+   table
+ - output-file-format: (optional) format of the output file.
+ - output-file: file which the table will be written to
+ - columns: specification of table columns
+ - data: Specify the jobs and builds whose data are used to generate the table
+ - filter: filter based on tags applied on the input data
+ - parameters: Only these parameters will be included in the output data
+   structure
+
+The structure of the section "Table" is as follows (example):
+
+ | -
+ |   type: "table"
+ |   title: "Performance improvments"
+ |   algoritm: "performance-improvements"
+ |   template: "templates/tmpl_performance_improvements.csv"
+ |   output-file-format: "csv"
+ |   output-file: "{DIR[WORKING]}/path/to/my_table.csv"
+ |   columns:
+ |   -
+ |     title: "VPP Functionality"
+ |     data: "template 2"
+ |   -
+ |     title: "Test Name"
+ |     data: "template 3"
+ |   -
+ |     title: "VPP-17.04 mean [Mpps]"
+ |     data: "vpp 1704 performance mean"
+ |   -
+ |     title: "VPP-17.07 mean [Mpps]"
+ |     data: "vpp 1707 performance mean"
+ |   -
+ |     title: "VPP-17.07 stdev [Mpps]"
+ |     data: "vpp 1707 performance stdev"
+ |   -
+ |     title: "17.04 to 17.07 change"
+ |     data: "change-relative 4 5"
+ |   rows: "generated"
+ |   data:
+ |     csit-vpp-perf-1707-all:
+ |     - 13
+ |     - 16
+ |     - 17
+ |   # Keep this formatting, the filter is enclosed with " (quotation mark) and
+ |   # each tag is enclosed with ' (apostrophe).
+ |   filter: "'64B' and '1T1C' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST'"
+ |   parameters:
+ |   - "throughput"
+ |   - "latency"
+
+
+Section: Plot
+'''''''''''''
+
+This section defines a plot to be generated. There can be 0 or more "plot"
+sections.
+
+This section has these parts:
+
+ - type: "plot" - says that this section defines a plot
+ - output-file-format: (optional) format of the output file.
+ - output-file: file which the plot will be written to
+ - plot-type: Type of the plot. The other parameters in this section must
+   provide all information needed by plot.ly to generate the plot. For example:
+
+   - x-axis: x-axis title
+   - y-axis: y-axis title
+
+ - data: Specify the jobs and builds whose data are used to generate the plot
+ - filter: filter applied on the input data
+
+The structure of the section "Plot" is as follows (example):
+
+ | -
+ |   type: "plot"
+ |   plot-type: "performance-box"   # box, line
+ |   output-file-type: "html"
+ |   output-file: "{DIR[WORKING]}/path/to/my_plot.html"
+ |   plot-title: "plot title"
+ |   x-axis: "x-axis title"
+ |   y-axis: "y-axis title"
+ |   data:
+ |     csit-vpp-perf-1707-all:
+ |     - 9
+ |     - 10
+ |     - 13
+ |     - 14
+ |     - 15
+ |     - 16
+ |     - 17
+ |     - 18
+ |     - 19
+ |     - 21
+ |   filter:
+ |     - "'64B' and 'BASE' and 'NDRDISC' and '1T1C' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST'"
+
+
+Static content
+``````````````
+
+ - Manually created / edited files
+ - .rst files, static .csv files, static pictures (.svg), ...
+ - Stored in CSIT gerrit
+
+No more details about the static content in this document.
+
+
+Data to process
+```````````````
+
+The PAL processes test results and other information produced by Jenkins jobs.
+The data are currently stored as robot results in Jenkins (TODO: store the data
+in Nexus), either as .zip or .xml files.
+
+
+Data processing
+---------------
+
+As the first step, the data are downloaded and stored locally (typically on a
+Jenkins slave). If .zip files are used, the given .xml files are extracted for
+further processing.
+
+Parsing of the .xml files is performed by a class derived from
+"robot.api.ResultVisitor"; only the necessary methods are overridden. All the
+necessary data, and only that data, is extracted from the .xml files and stored
+in a structured form.
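+
+A minimal sketch of such a visitor (not the actual PAL parser; it only collects
+test names and tags)::
+
+    from robot.api import ExecutionResult, ResultVisitor
+
+    class TestCollector(ResultVisitor):
+        """Collect the name and tags of every test in the results."""
+
+        def __init__(self):
+            self.tests = {}
+
+        def visit_test(self, test):
+            # The lowercase full path of the test is used as its ID.
+            self.tests[test.longname.lower()] = {
+                "name": test.name,
+                "tags": [str(tag) for tag in test.tags]}
+
+    result = ExecutionResult("output.xml")
+    collector = TestCollector()
+    result.visit(collector)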
+
+The parsed data are stored as a multi-indexed pandas.Series data type. Its
+structure is as follows:
+
+ | <job name>
+ |   <build>
+ |     <metadata>
+ |     <suites>
+ |     <tests>
+
+"job name", "build", "metadata", "suites", "tests" are indexes to access the
+data. For example:
+
+ | data =
+ |
+ | job 1 name:
+ |   build 1:
+ |     metadata: metadata
+ |     suites: suites
+ |     tests: tests
+ |   ...
+ |   build N:
+ |     metadata: metadata
+ |     suites: suites
+ |     tests: tests
+ | ...
+ | job M name:
+ |   build 1:
+ |     metadata: metadata
+ |     suites: suites
+ |     tests: tests
+ |   ...
+ |   build N:
+ |     metadata: metadata
+ |     suites: suites
+ |     tests: tests
+
+Using indexes data["job 1 name"]["build 1"]["tests"] (e.g.:
+data["csit-vpp-perf-1704-all"]["17"]["tests"]) we get a list of all tests with
+all tests data.
+
+The data will not be accessed directly using indexes, but through getters and
+filters.
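+
+A minimal sketch of how such a structure can be built and accessed; the content
+here is made up, the real data come from the parsed output.xml files::
+
+    import pandas as pd
+
+    build_17 = pd.Series({
+        "metadata": {"version": "VPP version", "job": "csit-vpp-perf-1707-all"},
+        "suites": {"Suite name 1": {"doc": "Suite 1 documentation"}},
+        "tests": {"test id": {"name": "Test name", "tags": ["64B", "1T1C"]}}})
+
+    data = pd.Series({"csit-vpp-perf-1707-all": pd.Series({"17": build_17})})
+
+    # Index access as described above:
+    print(data["csit-vpp-perf-1707-all"]["17"]["tests"])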
+
+**Structure of metadata:**
+
+ | "metadata": {
+ |     "version": "VPP version",
+ |     "job": "Jenkins job name"
+ |     "build": "Information about the build"
+ | },
+
+**Structure of suites:**
+
+ | "suites": {
+ |     "Suite name 1": {
+ |         "doc": "Suite 1 documentation"
+ |     }
+ |     "Suite name N": {
+ |         "doc": "Suite N documentation"
+ |     }
+
+**Structure of tests:**
+
+ | "tests": {
+ |     "ID": {
+ |         "name": "Test name",
+ |         "parent": "Name of the parent of the test",
+ |         "tags": ["tag 1", "tag 2", "tag n"],
+ |         "type": "PDR" | "NDR",
+ |         "throughput": {
+ |             "value": int,
+ |             "unit": "pps" | "bps" | "percentage"
+ |         },
+ |         "latency": {
+ |             "direction1": {
+ |                 "100": {
+ |                     "min": int,
+ |                     "avg": int,
+ |                     "max": int
+ |                 },
+ |                 "50": {  # Only for NDR
+ |                     "min": int,
+ |                     "avg": int,
+ |                     "max": int
+ |                 },
+ |                 "10": {  # Only for NDR
+ |                     "min": int,
+ |                     "avg": int,
+ |                     "max": int
+ |                 }
+ |             },
+ |             "direction2": {
+ |                 "100": {
+ |                     "min": int,
+ |                     "avg": int,
+ |                     "max": int
+ |                 },
+ |                 "50": {  # Only for NDR
+ |                     "min": int,
+ |                     "avg": int,
+ |                     "max": int
+ |                 },
+ |                 "10": {  # Only for NDR
+ |                     "min": int,
+ |                     "avg": int,
+ |                     "max": int
+ |                 }
+ |             }
+ |         },
+ |         "lossTolerance": "lossTolerance"  # Only for PDR
+ |         "vat-history": {
+ |             "DUT1": " DUT1 VAT History",
+ |             "DUT2": " DUT2 VAT History"
+ |         },
+ |         "show-run": "Show Run"
+ |     },
+ |     "ID" {
+ |         # next test
+ |     }
+
+Note: ID is the lowercase full path to the test, e.g.
+"tests.vpp.perf.l2.10ge2p1x520-eth-l2xcbase-ndrpdrdisc.tc01-64b-1t1c-eth-l2xcbase-ndrdisc".
+
+
+Data filtering
+``````````````
+
+The first step when generating an element is getting the data needed to
+construct the element. The data are filtered from the processed input data.
+
+The data filtering is based on:
+
+ - job name(s)
+ - build number(s)
+ - tag(s)
+ - required data - only this data is included in the output.
+
+WARNING: The filtering is based on tags, so be careful with tagging.
+
+For example, an element whose specification includes:
+
+ |   data:
+ |     csit-vpp-perf-1707-all:
+ |     - 9
+ |     - 10
+ |     - 13
+ |     - 14
+ |     - 15
+ |     - 16
+ |     - 17
+ |     - 18
+ |     - 19
+ |     - 21
+ |   filter:
+ |     - "'64B' and 'BASE' and 'NDRDISC' and '1T1C' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST'"
+
+will be constructed using data from the job "csit-vpp-perf-1707-all", all
+listed builds, and the tests whose list of tags fulfils the condition specified
+in the filter.
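+
+A minimal sketch of how such a filter expression can be evaluated against the
+tags of a single test (illustrative only; the real implementation may differ)::
+
+    import re
+
+    def tags_match(filter_expr, tags):
+        """Evaluate a tag filter expression against a list of tags."""
+        tag_set = set(tag.lower() for tag in tags)
+        # Replace each quoted tag with the result of a membership test, then
+        # evaluate the remaining and / or / not / parentheses expression.
+        expr = re.sub(r"'([^']+)'",
+                      lambda match: str(match.group(1).lower() in tag_set),
+                      filter_expr)
+        return eval(expr)
+
+    print(tags_match("'64B' and '1T1C' and not 'VHOST'",
+                     ["64B", "1T1C", "BASE", "NDRDISC"]))  # True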
+
+The output data structure for filtered test data is:
+
+ | - job 1
+ |   - build 1
+ |     - test 1 ID:
+ |       - parameter 1
+ |       - parameter 2
+ |       ...
+ |       - parameter n
+ |     ...
+ |     - test n ID:
+ |     ...
+ |   ...
+ |   - build n
+ | ...
+ | - job n
+
+
+Data analytics
+``````````````
+
+The data analytics part implements (see the sketch below):
+
+ - methods to compute statistical data from the filtered input data
+ - trending
+ - etc.
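+
+For example, the "performance improvements" table shown earlier needs the mean
+and the standard deviation of the measured throughput per release, and a
+relative change between two releases (the "change-relative 4 5" column
+presumably compares the values in columns 4 and 5). A minimal sketch of such
+computations (illustrative only)::
+
+    import math
+
+    def mean(values):
+        return sum(values) / float(len(values))
+
+    def stdev(values):
+        avg = mean(values)
+        return math.sqrt(sum((val - avg) ** 2 for val in values) / len(values))
+
+    def relative_change(old, new):
+        """Relative change from old to new, in percent."""
+        return (new - old) / old * 100.0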
+
+
+Data presentation
+-----------------
+
+This part generates the plots and tables according to the report models
+specified in the specification file. The elements are generated using the
+algorithms and data specified in their models.
+
+Tables
+``````
+
+ - tables are generated by algorithms implemented in PAL; the model includes
+   the algorithm and all necessary information.
+ - output format: csv
+ - generated tables are stored in specified directories and linked to .rst files
+
+
+Plots
+`````
+
+ - `plot.ly <https://plot.ly/>`_ is currently used to generate plots (see the
+   sketch below); the model includes the type of the plot and all necessary
+   information.
+ - output format: html
+ - generated plots are stored in specified directories and linked to .rst files
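+
+A minimal sketch of generating a box plot into an .html file with plot.ly
+(illustrative only; the trace name and data are made up)::
+
+    import plotly.graph_objs as go
+    from plotly.offline import plot
+
+    traces = [go.Box(y=[9.1, 9.4, 9.3], name="64B-1t1c-eth-l2xcbase-ndrdisc")]
+    layout = go.Layout(title="plot title",
+                       xaxis={"title": "x-axis title"},
+                       yaxis={"title": "y-axis title"})
+    plot(go.Figure(data=traces, layout=layout),
+         filename="my_plot.html", auto_open=False)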
+
+
+Report generation
+-----------------
+
+The report is generated using Sphinx and the Read the Docs template. PAL
+generates the html and pdf formats. It is possible to define the content of the
+report by specifying the version (TODO: define the names and content of
+versions).
+
+The process
+```````````
+
+1. Read the specification
+2. Read the input data
+3. Process the input data
+4. For each element (plot, table) defined in the specification:
+
+   a. Get the data needed to construct the element using a filter
+   b. Generate the element
+   c. Store the element
+
+5. Generate the report
+6. Store the report (Nexus)
+
+The process is model driven. The elements’ models (tables, plots and the report
+itself) are defined in the specification file. The script reads the elements’
+models from the specification file and generates the elements.
+
+It is easy to add elements to be generated: if a new kind of element is
+required, only a new algorithm needs to be implemented and integrated.
diff --git a/resources/tools/presentation/presentation.py b/resources/tools/presentation/presentation.py
new file mode 100644 (file)
index 0000000..2111151
--- /dev/null
@@ -0,0 +1,111 @@
+# Copyright (c) 2017 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""CSIT Presentation and analytics layer.
+
+TODO: Write the description.
+
+TODO: Write the description and specification of YAML configuration file.
+
+"""
+
+import sys
+import argparse
+import logging
+
+from errors import PresentationError
+from environment import Environment, clean_environment
+from configuration import Configuration
+from inputs import download_data_files, unzip_files
+from data import InputData
+
+
+def parse_args():
+    """Parse arguments from cmd line.
+
+    :returns: Parsed arguments.
+    :rtype: argparse.Namespace
+    """
+
+    parser = argparse.ArgumentParser(
+        description=__doc__,
+        formatter_class=argparse.RawDescriptionHelpFormatter)
+    parser.add_argument("-c", "--configuration",
+                        required=True,
+                        type=argparse.FileType('r'),
+                        help="Configuration YAML file.")
+    parser.add_argument("-l", "--logging",
+                        choices=["DEBUG", "INFO", "WARNING",
+                                 "ERROR", "CRITICAL"],
+                        default="ERROR",
+                        help="Logging level.")
+    parser.add_argument("-f", "--force",
+                        action='store_true',
+                        help="Force removing the old build(s) if present.")
+
+    return parser.parse_args()
+
+
+def main():
+    """Main function."""
+
+    log_levels = {"NOTSET": logging.NOTSET,
+                  "DEBUG": logging.DEBUG,
+                  "INFO": logging.INFO,
+                  "WARNING": logging.WARNING,
+                  "ERROR": logging.ERROR,
+                  "CRITICAL": logging.CRITICAL}
+
+    args = parse_args()
+    logging.basicConfig(format='%(asctime)s: %(levelname)s: %(message)s',
+                        datefmt='%Y/%m/%d %H:%M:%S',
+                        level=log_levels[args.logging])
+
+    logging.info("Application started.")
+    try:
+        config = Configuration(args.configuration)
+        config.parse_cfg()
+    except PresentationError:
+        logging.critical("Finished with error.")
+        sys.exit(1)
+
+    ret_code = 0
+    try:
+        env = Environment(config.environment, args.force)
+        env.set_environment()
+
+        if config.is_debug:
+            if config.debug["input-format"] == "zip":
+                unzip_files(config)
+        else:
+            download_data_files(config)
+
+        data = InputData(config)
+        data.parse_input_data()
+
+        logging.info("Successfully finished.")
+
+    except (KeyError, ValueError, PresentationError) as err:
+        logging.info("Finished with an error.")
+        logging.critical(str(err))
+        ret_code = 1
+    except Exception as err:
+        logging.info("Finished with an unexpected error.")
+        logging.critical(str(err))
+        ret_code = 1
+
+    finally:
+        if not config.is_debug:
+            clean_environment(config.environment)
+        sys.exit(ret_code)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/resources/tools/presentation/templates/tmpl_performance_improvements.csv b/resources/tools/presentation/templates/tmpl_performance_improvements.csv
new file mode 100644 (file)
index 0000000..08b92de
--- /dev/null
@@ -0,0 +1,47 @@
+tests.vpp.perf.ip6.10ge2p1x520-ethip6-ip6base-ndrpdrdisc.tc01-78b-1t1c-ethip6-ip6base-ndrdisc,IPv6,10ge2p1x520: 78B-1t1c-ethip6-ip6base-ndrdisc
+tests.vpp.perf.ip6.10ge2p1x520-ethip6-ip6base-copwhtlistbase-ndrpdrdisc.tc01-78b-1t1c-ethip6-ip6base-copwhtlistbase-ndrdisc,IPv6 COP,10ge2p1x520: 78B-1t1c-ethip6-ip6base-copwhtlistbase-ndrdisc
+tests.vpp.perf.ip6.10ge2p1x520-ethip6-ip6base-iacldstbase-ndrpdrdisc.tc01-78b-1t1c-ethip6-ip6base-iacldstbase-ndrdisc,IPv6 iAcl,10ge2p1x520: 78B-1t1c-ethip6-ip6base-iacldstbase-ndrdisc
+tests.vpp.perf.ip6.10ge2p1x520-ethip6-ip6scale2m-ndrpdrdisc.tc01-78b-1t1c-ethip6-ip6scale2m-ndrdisc,IPv6 FIB 2M,10ge2p1x520: 78B-1t1c-ethip6-ip6scale2m-ndrdisc
+tests.vpp.perf.l2.40ge2p1xl710-eth-l2xcbase-ndrpdrdisc.tc01-64b-1t1c-eth-l2xcbase-ndrdisc,L2XC,40ge2p1xl710: 64B-1t1c-eth-l2xcbase-ndrdisc
+tests.vpp.perf.ip4.10ge2p1x520-ethip4-ip4base-copwhtlistbase-ndrpdrdisc.tc01-64b-1t1c-ethip4-ip4base-copwhtlistbase-ndrdisc,IPv4 COP,10ge2p1x520: 64B-1t1c-ethip4-ip4base-copwhtlistbase-ndrdisc
+tests.vpp.perf.ip4.10ge2p1x520-ethip4-ip4scale2m-ndrpdrdisc.tc01-64b-1t1c-ethip4-ip4scale2m-ndrdisc,IPv4 FIB 2M,10ge2p1x520: 64B-1t1c-ethip4-ip4scale2m-ndrdisc
+tests.vpp.perf.ip6.10ge2p1x520-ethip6-ip6scale20k-ndrpdrdisc.tc01-78b-1t1c-ethip6-ip6scale20k-ndrdisc,IPv6 FIB 20k,10ge2p1x520: 78B-1t1c-ethip6-ip6scale20k-ndrdisc
+tests.vpp.perf.l2.10ge2p1x520-eth-l2xcbase-eth-2memif-1lxc-ndrpdrdisc.tc01-64b-1t1c-eth-l2xcbase-eth-2memif-1lxc-ndrdisc,LXC MEMIF,10ge2p1x520: 64B-1t1c-eth-l2xcbase-eth-2memif-1lxc-ndrdisc
+tests.vpp.perf.l2.10ge2p1x520-eth-l2xcbase-ndrpdrdisc.tc01-64b-1t1c-eth-l2xcbase-ndrdisc,L2XC,10ge2p1x520: 64B-1t1c-eth-l2xcbase-ndrdisc
+tests.vpp.perf.l2.10ge2p1x520-dot1ad-l2xcbase-ndrpdrdisc.tc01-64b-1t1c-dot1ad-l2xcbase-ndrdisc,L2XC dot1ad,10ge2p1x520: 64B-1t1c-dot1ad-l2xcbase-ndrdisc
+tests.vpp.perf.l2.10ge2p1x520-dot1q-l2xcbase-ndrpdrdisc.tc01-64b-1t1c-dot1q-l2xcbase-ndrdisc,L2XC dot1q,10ge2p1x520: 64B-1t1c-dot1q-l2xcbase-ndrdisc
+tests.vpp.perf.ip4 tunnels.10ge2p1x520-ethip4vxlan-l2xcbase-ndrpdrdisc.tc01-64b-1t1c-ethip4vxlan-l2xcbase-ndrdisc,L2XC VxLAN,10ge2p1x520: 64B-1t1c-ethip4vxlan-l2xcbase-ndrdisc
+tests.vpp.perf.l2.10ge2p1x520-eth-l2bdbasemaclrn-ndrpdrdisc.tc01-64b-1t1c-eth-l2bdbasemaclrn-ndrdisc,L2BD,10ge2p1x520: 64B-1t1c-eth-l2bdbasemaclrn-ndrdisc
+tests.vpp.perf.ip4.10ge2p1x520-ethip4-ip4base-ndrpdrdisc.tc01-64b-1t1c-ethip4-ip4base-ndrdisc,IPv4,10ge2p1x520: 64B-1t1c-ethip4-ip4base-ndrdisc
+tests.vpp.perf.ip4.10ge2p1x520-ethip4-ip4base-iacldstbase-ndrpdrdisc.tc01-64b-1t1c-ethip4-ip4base-iacldstbase-ndrdisc,IPv4 iAcl,10ge2p1x520: 64B-1t1c-ethip4-ip4base-iacldstbase-ndrdisc
+tests.vpp.perf.ip4.10ge2p1x520-ethip4-ip4scale200k-ndrpdrdisc.tc01-64b-1t1c-ethip4-ip4scale200k-ndrdisc,IPv4 FIB 200k,10ge2p1x520: 64B-1t1c-ethip4-ip4scale200k-ndrdisc
+tests.vpp.perf.ip4.10ge2p1x520-ethip4-ip4scale20k-ndrpdrdisc.tc01-64b-1t1c-ethip4-ip4scale20k-ndrdisc,IPv4 FIB 20k,10ge2p1x520: 64B-1t1c-ethip4-ip4scale20k-ndrdisc
+tests.vpp.perf.ip4.10ge2p1x520-ethip4-ip4base-ipolicemarkbase-ndrpdrdisc.tc01-64b-1t1c-ethip4-ip4base-ipolicemarkbase-ndrdisc,IPv4 Policer,10ge2p1x520: 64B-1t1c-ethip4-ip4base-ipolicemarkbase-ndrdisc
+tests.vpp.perf.ip6.10ge2p1x520-ethip6-ip6scale200k-ndrpdrdisc.tc01-78b-1t1c-ethip6-ip6scale200k-ndrdisc,IPv6 FIB 200k,10ge2p1x520: 78B-1t1c-ethip6-ip6scale200k-ndrdisc
+tests.vpp.perf.ip4 tunnels.10ge2p1x520-ethip4lispip4-ip4base-ndrpdrdisc.tc01-64b-1t1c-ethip4lispip4-ip4base-ndrdisc,IPv4 LISP,10ge2p1x520: 64B-1t1c-ethip4lispip4-ip4base-ndrdisc
+tests.vpp.perf.vm vhost.10ge2p1x520-dot1q-l2xcbase-eth-2vhostvr1024-1vm-ndrpdrdisc.tc01-64b-1t1c-eth-l2xcbase-eth-2vhost-1vm-ndrdisc,L2XC-vhost-VM,10ge2p1x520: 64B-1t1c-eth-l2xcbase-eth-2vhost-1vm-ndrdisc
+tests.vpp.perf.vm vhost.10ge2p1x710-eth-l2bdbasemaclrn-eth-2vhostvr1024-1vm-ndrpdrdisc.tc01-64b-1t1c-eth-l2bdbasemaclrn-eth-2vhost-1vm-ndrdisc,L2BD-vhost-VM,10ge2p1x710: 64B-1t1c-eth-l2bdbasemaclrn-eth-2vhost-1vm-ndrdisc
+tests.vpp.perf.vm vhost.10ge2p1x520-ethip4-ip4base-eth-2vhostvr1024-1vm-ndrpdrdisc.tc01-64b-1t1c-ethip4-ip4base-eth-2vhostvr1024-1vm-ndrdisc,IPv4 vhost,10ge2p1x520: 64B-1t1c-ethip4-ip4base-eth-2vhostvr1024-1vm-ndrdisc
+tests.vpp.perf.vm vhost.10ge2p1x520-dot1q-l2xcbase-eth-2vhostvr1024-1vm-ndrpdrdisc.tc02-64b-1t1c-eth-l2xcbase-eth-2vhost-1vm-pdrdisc,L2XC-vhost-VM,10ge2p1x520: 64B-1t1c-eth-l2xcbase-eth-2vhost-1vm-pdrdisc
+tests.vpp.perf.vm vhost.10ge2p1x710-eth-l2bdbasemaclrn-eth-2vhostvr1024-1vm-ndrpdrdisc.tc02-64b-1t1c-eth-l2bdbasemaclrn-eth-2vhost-1vm-pdrdisc,L2BD-vhost-VM,10ge2p1x710: 64B-1t1c-eth-l2bdbasemaclrn-eth-2vhost-1vm-pdrdisc
+tests.vpp.perf.vm vhost.10ge2p1x520-ethip4-ip4base-eth-2vhostvr1024-1vm-ndrpdrdisc.tc02-64b-1t1c-ethip4-ip4base-eth-2vhostvr1024-1vm-pdrdisc,IPv4 vhost,10ge2p1x520: 64B-1t1c-ethip4-ip4base-eth-2vhostvr1024-1vm-pdrdisc
+tests.vpp.perf.ip4 tunnels.10ge2p1x520-ethip4lispip4-ip4base-ndrpdrdisc.tc02-64b-1t1c-ethip4lispip4-ip4base-pdrdisc,IPv4 LISP,10ge2p1x520: 64B-1t1c-ethip4lispip4-ip4base-pdrdisc
+tests.vpp.perf.ip6.10ge2p1x520-ethip6-ip6base-ndrpdrdisc.tc02-78b-1t1c-ethip6-ip6base-pdrdisc,IPv6,10ge2p1x520: 78B-1t1c-ethip6-ip6base-pdrdisc
+tests.vpp.perf.ip6.10ge2p1x520-ethip6-ip6base-copwhtlistbase-ndrpdrdisc.tc02-78b-1t1c-ethip6-ip6base-copwhtlistbase-pdrdisc,IPv6 COP,10ge2p1x520: 78B-1t1c-ethip6-ip6base-copwhtlistbase-pdrdisc
+tests.vpp.perf.ip6.10ge2p1x520-ethip6-ip6base-iacldstbase-ndrpdrdisc.tc02-78b-1t1c-ethip6-ip6base-iacldstbase-pdrdisc,IPv6 iAcl,10ge2p1x520: 78B-1t1c-ethip6-ip6base-iacldstbase-pdrdisc
+tests.vpp.perf.ip6.10ge2p1x520-ethip6-ip6scale2m-ndrpdrdisc.tc02-78b-1t1c-ethip6-ip6scale2m-pdrdisc,IPv6 FIB 2M,10ge2p1x520: 78B-1t1c-ethip6-ip6scale2m-pdrdisc
+tests.vpp.perf.l2.10ge2p1x520-eth-l2xcbase-eth-2memif-1lxc-ndrpdrdisc.tc02-64b-1t1c-eth-l2xcbase-eth-2memif-1lxc-pdrdisc,LXC MEMIF,10ge2p1x520: 64B-1t1c-eth-l2xcbase-eth-2memif-1lxc-pdrdisc
+tests.vpp.perf.l2.10ge2p1x520-eth-l2xcbase-ndrpdrdisc.tc02-64b-1t1c-eth-l2xcbase-pdrdisc,L2XC,10ge2p1x520: 64B-1t1c-eth-l2xcbase-pdrdisc
+tests.vpp.perf.l2.10ge2p1x520-dot1ad-l2xcbase-ndrpdrdisc.tc02-64b-1t1c-dot1ad-l2xcbase-pdrdisc,L2XC dot1ad,10ge2p1x520: 64B-1t1c-dot1ad-l2xcbase-pdrdisc
+tests.vpp.perf.l2.10ge2p1x520-dot1q-l2xcbase-ndrpdrdisc.tc02-64b-1t1c-dot1q-l2xcbase-pdrdisc,L2XC dot1q,10ge2p1x520: 64B-1t1c-dot1q-l2xcbase-pdrdisc
+tests.vpp.perf.ip4 tunnels.10ge2p1x520-ethip4vxlan-l2xcbase-ndrpdrdisc.tc02-64b-1t1c-ethip4vxlan-l2xcbase-pdrdisc,L2XC VxLAN,10ge2p1x520: 64B-1t1c-ethip4vxlan-l2xcbase-pdrdisc
+tests.vpp.perf.l2.10ge2p1x520-eth-l2bdbasemaclrn-ndrpdrdisc.tc02-64b-1t1c-eth-l2bdbasemaclrn-pdrdisc,L2BD,10ge2p1x520: 64B-1t1c-eth-l2bdbasemaclrn-pdrdisc
+tests.vpp.perf.ip4.10ge2p1x520-ethip4-ip4base-ndrpdrdisc.tc02-64b-1t1c-ethip4-ip4base-pdrdisc,IPv4,10ge2p1x520: 64B-1t1c-ethip4-ip4base-pdrdisc
+tests.vpp.perf.ip4.10ge2p1x520-ethip4-ip4base-copwhtlistbase-ndrpdrdisc.tc02-64b-1t1c-ethip4-ip4base-copwhtlistbase-pdrdisc,IPv4 COP,10ge2p1x520: 64B-1t1c-ethip4-ip4base-copwhtlistbase-pdrdisc
+tests.vpp.perf.ip4.10ge2p1x520-ethip4-ip4base-iacldstbase-ndrpdrdisc.tc02-64b-1t1c-ethip4-ip4base-iacldstbase-pdrdisc,IPv4 iAcl,10ge2p1x520: 64B-1t1c-ethip4-ip4base-iacldstbase-pdrdisc
+tests.vpp.perf.ip4.10ge2p1x520-ethip4-ip4scale200k-ndrpdrdisc.tc02-64b-1t1c-ethip4-ip4scale200k-pdrdisc,IPv4 FIB 200k,10ge2p1x520: 64B-1t1c-ethip4-ip4scale200k-pdrdisc
+tests.vpp.perf.ip4.10ge2p1x520-ethip4-ip4scale20k-ndrpdrdisc.tc02-64b-1t1c-ethip4-ip4scale20k-pdrdisc,IPv4 FIB 20k,10ge2p1x520: 64B-1t1c-ethip4-ip4scale20k-pdrdisc
+tests.vpp.perf.ip4.10ge2p1x520-ethip4-ip4scale2m-ndrpdrdisc.tc02-64b-1t1c-ethip4-ip4scale2m-pdrdisc,IPv4 FIB 2M,10ge2p1x520: 64B-1t1c-ethip4-ip4scale2m-pdrdisc
+tests.vpp.perf.ip4.10ge2p1x520-ethip4-ip4base-ipolicemarkbase-ndrpdrdisc.tc02-64b-1t1c-ethip4-ip4base-ipolicemarkbase-pdrdisc,IPv4 Policer,10ge2p1x520: 64B-1t1c-ethip4-ip4base-ipolicemarkbase-pdrdisc
+tests.vpp.perf.ip6.10ge2p1x520-ethip6-ip6scale200k-ndrpdrdisc.tc02-78b-1t1c-ethip6-ip6scale200k-pdrdisc,IPv6 FIB 200k,10ge2p1x520: 78B-1t1c-ethip6-ip6scale200k-pdrdisc
+tests.vpp.perf.ip6.10ge2p1x520-ethip6-ip6scale20k-ndrpdrdisc.tc02-78b-1t1c-ethip6-ip6scale20k-pdrdisc,IPv6 FIB 20k,10ge2p1x520: 78B-1t1c-ethip6-ip6scale20k-pdrdisc