UTI: Add comments and clean the code. 40/36440/3
authorTibor Frank <tifrank@cisco.com>
Wed, 15 Jun 2022 12:29:02 +0000 (14:29 +0200)
committerTibor Frank <tifrank@cisco.com>
Wed, 29 Jun 2022 11:14:23 +0000 (11:14 +0000)
Change-Id: I6fba9aac20ed22c2ae1450161edc8c11ffa1e24d
Signed-off-by: Tibor Frank <tifrank@cisco.com>
resources/tools/dash/app/pal/__init__.py
resources/tools/dash/app/pal/data/data.py
resources/tools/dash/app/pal/data/data.yaml
resources/tools/dash/app/pal/data/url_processing.py
resources/tools/dash/app/pal/news/layout.py
resources/tools/dash/app/pal/routes.py

index f66edce..9f80c5f 100644 (file)
@@ -57,6 +57,7 @@ def init_app():
         assets = Environment()
         assets.init_app(app)
 
         assets = Environment()
         assets.init_app(app)
 
+        # Set the time period for Trending
         if TIME_PERIOD is None or TIME_PERIOD > MAX_TIME_PERIOD:
             time_period = MAX_TIME_PERIOD
         else:
         if TIME_PERIOD is None or TIME_PERIOD > MAX_TIME_PERIOD:
             time_period = MAX_TIME_PERIOD
         else:
index efe2a2d..f2c02ac 100644 (file)
@@ -11,7 +11,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-"""Prepare data for Plotly Dash."""
+"""Prepare data for Plotly Dash applications.
+"""
 
 import logging
 
 
 import logging
 
@@ -27,11 +28,20 @@ from awswrangler.exceptions import EmptyDataFrame, NoFilesFound
 
 
 class Data:
 
 
 class Data:
-    """
+    """Gets the data from parquets and stores it for further use by dash
+    applications.
     """
 
     def __init__(self, data_spec_file: str, debug: bool=False) -> None:
     """
 
     def __init__(self, data_spec_file: str, debug: bool=False) -> None:
-        """
+        """Initialize the Data object.
+
+        :param data_spec_file: Path to file specifying the data to be read from
+            parquets.
+        :param debug: If True, the debug information is printed to stdout.
+        :type data_spec_file: str
+        :type debug: bool
+        :raises RuntimeError: if it is not possible to open data_spec_file or it
+            is not a valid yaml file.
         """
 
         # Inputs:
         """
 
         # Inputs:
@@ -64,6 +74,17 @@ class Data:
         return self._data
 
     def _get_columns(self, parquet: str) -> list:
         return self._data
 
     def _get_columns(self, parquet: str) -> list:
+        """Get the list of columns from the data specification file to be read
+        from parquets.
+
+        :param parquet: The parquet's name.
+        :type parquet: str
+        :raises RuntimeError: if the parquet is not defined in the data
+            specification file or it does not have any columns specified.
+        :returns: List of columns.
+        :rtype: list
+        """
+
         try:
             return self._data_spec[parquet]["columns"]
         except KeyError as err:
         try:
             return self._data_spec[parquet]["columns"]
         except KeyError as err:
@@ -74,6 +95,17 @@ class Data:
             )
 
     def _get_path(self, parquet: str) -> str:
             )
 
     def _get_path(self, parquet: str) -> str:
+        """Get the path from the data specification file to be read from
+        parquets.
+
+        :param parquet: The parquet's name.
+        :type parquet: str
+        :raises RuntimeError: if the parquet is not defined in the data
+            specification file or it does not have the path specified.
+        :returns: Path.
+        :rtype: str
+        """
+
         try:
             return self._data_spec[parquet]["path"]
         except KeyError as err:
         try:
             return self._data_spec[parquet]["path"]
         except KeyError as err:
@@ -84,9 +116,12 @@ class Data:
             )
 
     def _create_dataframe_from_parquet(self,
             )
 
     def _create_dataframe_from_parquet(self,
-        path, partition_filter=None, columns=None,
-        validate_schema=False, last_modified_begin=None,
-        last_modified_end=None, days=None) -> DataFrame:
+        path, partition_filter=None,
+        columns=None,
+        validate_schema=False,
+        last_modified_begin=None,
+        last_modified_end=None,
+        days=None) -> DataFrame:
         """Read parquet stored in S3 compatible storage and returns Pandas
         Dataframe.
 
         """Read parquet stored in S3 compatible storage and returns Pandas
         Dataframe.
 
@@ -151,8 +186,21 @@ class Data:
         return df
 
     def read_stats(self, days: int=None) -> tuple:
         return df
 
     def read_stats(self, days: int=None) -> tuple:
-        """Read Suite Result Analysis data partition from parquet.
+        """Read statistics from parquet.
+
+        It reads from:
+        - Suite Result Analysis (SRA) partition,
+        - NDRPDR trending partition,
+        - MRR trending partition.
+
+        :param days: Number of days back to the past for which the data will be
+            read.
+        :type days: int
+        :returns: tuple of pandas DataFrame-s with data read from specified
+            parquets.
+        :rtype: tuple of pandas DataFrame-s
         """
         """
+
         l_stats = lambda part: True if part["stats_type"] == "sra" else False
         l_mrr = lambda part: True if part["test_type"] == "mrr" else False
         l_ndrpdr = lambda part: True if part["test_type"] == "ndrpdr" else False
         l_stats = lambda part: True if part["stats_type"] == "sra" else False
         l_mrr = lambda part: True if part["test_type"] == "mrr" else False
         l_ndrpdr = lambda part: True if part["test_type"] == "ndrpdr" else False
@@ -180,7 +228,14 @@ class Data:
 
     def read_trending_mrr(self, days: int=None) -> DataFrame:
         """Read MRR data partition from parquet.
 
     def read_trending_mrr(self, days: int=None) -> DataFrame:
         """Read MRR data partition from parquet.
+
+        :param days: Number of days back to the past for which the data will be
+            read.
+        :type days: int
+        :returns: Pandas DataFrame with read data.
+        :rtype: DataFrame
         """
         """
+
         lambda_f = lambda part: True if part["test_type"] == "mrr" else False
 
         return self._create_dataframe_from_parquet(
         lambda_f = lambda part: True if part["test_type"] == "mrr" else False
 
         return self._create_dataframe_from_parquet(
@@ -192,7 +247,14 @@ class Data:
 
     def read_trending_ndrpdr(self, days: int=None) -> DataFrame:
         """Read NDRPDR data partition from iterative parquet.
 
     def read_trending_ndrpdr(self, days: int=None) -> DataFrame:
         """Read NDRPDR data partition from iterative parquet.
+
+        :param days: Number of days back to the past for which the data will be
+            read.
+        :type days: int
+        :returns: Pandas DataFrame with read data.
+        :rtype: DataFrame
         """
         """
+
         lambda_f = lambda part: True if part["test_type"] == "ndrpdr" else False
 
         return self._create_dataframe_from_parquet(
         lambda_f = lambda part: True if part["test_type"] == "ndrpdr" else False
 
         return self._create_dataframe_from_parquet(
@@ -204,7 +266,13 @@ class Data:
 
     def read_iterative_mrr(self, release: str) -> DataFrame:
         """Read MRR data partition from iterative parquet.
 
     def read_iterative_mrr(self, release: str) -> DataFrame:
         """Read MRR data partition from iterative parquet.
+
+        :param release: The CSIT release from which the data will be read.
+        :type release: str
+        :returns: Pandas DataFrame with read data.
+        :rtype: DataFrame
         """
         """
+
         lambda_f = lambda part: True if part["test_type"] == "mrr" else False
 
         return self._create_dataframe_from_parquet(
         lambda_f = lambda part: True if part["test_type"] == "mrr" else False
 
         return self._create_dataframe_from_parquet(
@@ -215,7 +283,13 @@ class Data:
 
     def read_iterative_ndrpdr(self, release: str) -> DataFrame:
         """Read NDRPDR data partition from parquet.
 
     def read_iterative_ndrpdr(self, release: str) -> DataFrame:
         """Read NDRPDR data partition from parquet.
+
+        :param release: The CSIT release from which the data will be read.
+        :type release: str
+        :returns: Pandas DataFrame with read data.
+        :rtype: DataFrame
         """
         """
+
         lambda_f = lambda part: True if part["test_type"] == "ndrpdr" else False
 
         return self._create_dataframe_from_parquet(
         lambda_f = lambda part: True if part["test_type"] == "ndrpdr" else False
 
         return self._create_dataframe_from_parquet(
index 69f7165..2585ef0 100644 (file)
@@ -26,13 +26,10 @@ trending-mrr:
     - start_time
     - passed
     - test_id
     - start_time
     - passed
     - test_id
-    # - test_name_long
-    # - test_name_short
     - version
     - result_receive_rate_rate_avg
     - result_receive_rate_rate_stdev
     - result_receive_rate_rate_unit
     - version
     - result_receive_rate_rate_avg
     - result_receive_rate_rate_stdev
     - result_receive_rate_rate_unit
-    # - result_receive_rate_rate_values
 trending-ndrpdr:
   path: s3://fdio-docs-s3-cloudfront-index/csit/parquet/trending
   columns:
 trending-ndrpdr:
   path: s3://fdio-docs-s3-cloudfront-index/csit/parquet/trending
   columns:
@@ -44,65 +41,21 @@ trending-ndrpdr:
     - start_time
     - passed
     - test_id
     - start_time
     - passed
     - test_id
-    # - test_name_long
-    # - test_name_short
     - version
     - version
-    # - result_pdr_upper_rate_unit
-    # - result_pdr_upper_rate_value
-    # - result_pdr_upper_bandwidth_unit
-    # - result_pdr_upper_bandwidth_value
     - result_pdr_lower_rate_unit
     - result_pdr_lower_rate_value
     - result_pdr_lower_rate_unit
     - result_pdr_lower_rate_value
-    # - result_pdr_lower_bandwidth_unit
-    # - result_pdr_lower_bandwidth_value
-    # - result_ndr_upper_rate_unit
-    # - result_ndr_upper_rate_value
-    # - result_ndr_upper_bandwidth_unit
-    # - result_ndr_upper_bandwidth_value
     - result_ndr_lower_rate_unit
     - result_ndr_lower_rate_value
     - result_ndr_lower_rate_unit
     - result_ndr_lower_rate_value
-    # - result_ndr_lower_bandwidth_unit
-    # - result_ndr_lower_bandwidth_value
-    # - result_latency_reverse_pdr_90_avg
     - result_latency_reverse_pdr_90_hdrh
     - result_latency_reverse_pdr_90_hdrh
-    # - result_latency_reverse_pdr_90_max
-    # - result_latency_reverse_pdr_90_min
-    # - result_latency_reverse_pdr_90_unit
-    # - result_latency_reverse_pdr_50_avg
     - result_latency_reverse_pdr_50_hdrh
     - result_latency_reverse_pdr_50_hdrh
-    # - result_latency_reverse_pdr_50_max
-    # - result_latency_reverse_pdr_50_min
-    # - result_latency_reverse_pdr_50_unit
-    # - result_latency_reverse_pdr_10_avg
     - result_latency_reverse_pdr_10_hdrh
     - result_latency_reverse_pdr_10_hdrh
-    # - result_latency_reverse_pdr_10_max
-    # - result_latency_reverse_pdr_10_min
-    # - result_latency_reverse_pdr_10_unit
-    # - result_latency_reverse_pdr_0_avg
     - result_latency_reverse_pdr_0_hdrh
     - result_latency_reverse_pdr_0_hdrh
-    # - result_latency_reverse_pdr_0_max
-    # - result_latency_reverse_pdr_0_min
-    # - result_latency_reverse_pdr_0_unit
-    # - result_latency_forward_pdr_90_avg
     - result_latency_forward_pdr_90_hdrh
     - result_latency_forward_pdr_90_hdrh
-    # - result_latency_forward_pdr_90_max
-    # - result_latency_forward_pdr_90_min
-    # - result_latency_forward_pdr_90_unit
     - result_latency_forward_pdr_50_avg
     - result_latency_forward_pdr_50_hdrh
     - result_latency_forward_pdr_50_avg
     - result_latency_forward_pdr_50_hdrh
-    # - result_latency_forward_pdr_50_max
-    # - result_latency_forward_pdr_50_min
     - result_latency_forward_pdr_50_unit
     - result_latency_forward_pdr_50_unit
-    # - result_latency_forward_pdr_10_avg
     - result_latency_forward_pdr_10_hdrh
     - result_latency_forward_pdr_10_hdrh
-    # - result_latency_forward_pdr_10_max
-    # - result_latency_forward_pdr_10_min
-    # - result_latency_forward_pdr_10_unit
-    # - result_latency_forward_pdr_0_avg
     - result_latency_forward_pdr_0_hdrh
     - result_latency_forward_pdr_0_hdrh
-    # - result_latency_forward_pdr_0_max
-    # - result_latency_forward_pdr_0_min
-    # - result_latency_forward_pdr_0_unit
 iterative-mrr:
   path: s3://fdio-docs-s3-cloudfront-index/csit/parquet/iterative_{release}
   columns:
 iterative-mrr:
   path: s3://fdio-docs-s3-cloudfront-index/csit/parquet/iterative_{release}
   columns:
@@ -114,8 +67,6 @@ iterative-mrr:
     - start_time
     - passed
     - test_id
     - start_time
     - passed
     - test_id
-    # - test_name_long
-    # - test_name_short
     - version
     - result_receive_rate_rate_avg
     - result_receive_rate_rate_stdev
     - version
     - result_receive_rate_rate_avg
     - result_receive_rate_rate_stdev
@@ -132,66 +83,14 @@ iterative-ndrpdr:
     - start_time
     - passed
     - test_id
     - start_time
     - passed
     - test_id
-    # - test_name_long
-    # - test_name_short
     - version
     - version
-    # - result_pdr_upper_rate_unit
-    # - result_pdr_upper_rate_value
-    # - result_pdr_upper_bandwidth_unit
-    # - result_pdr_upper_bandwidth_value
     - result_pdr_lower_rate_unit
     - result_pdr_lower_rate_value
     - result_pdr_lower_rate_unit
     - result_pdr_lower_rate_value
-    # - result_pdr_lower_bandwidth_unit
-    # - result_pdr_lower_bandwidth_value
-    # - result_ndr_upper_rate_unit
-    # - result_ndr_upper_rate_value
-    # - result_ndr_upper_bandwidth_unit
-    # - result_ndr_upper_bandwidth_value
     - result_ndr_lower_rate_unit
     - result_ndr_lower_rate_value
     - result_ndr_lower_rate_unit
     - result_ndr_lower_rate_value
-    # - result_ndr_lower_bandwidth_unit
-    # - result_ndr_lower_bandwidth_value
-    # - result_latency_reverse_pdr_90_avg
-    ## - result_latency_reverse_pdr_90_hdrh
-    # - result_latency_reverse_pdr_90_max
-    # - result_latency_reverse_pdr_90_min
-    # - result_latency_reverse_pdr_90_unit
-    # - result_latency_reverse_pdr_50_avg
-    ## - result_latency_reverse_pdr_50_hdrh
-    # - result_latency_reverse_pdr_50_max
-    # - result_latency_reverse_pdr_50_min
-    # - result_latency_reverse_pdr_50_unit
-    # - result_latency_reverse_pdr_10_avg
-    ## - result_latency_reverse_pdr_10_hdrh
-    # - result_latency_reverse_pdr_10_max
-    # - result_latency_reverse_pdr_10_min
-    # - result_latency_reverse_pdr_10_unit
-    # - result_latency_reverse_pdr_0_avg
-    ## - result_latency_reverse_pdr_0_hdrh
-    # - result_latency_reverse_pdr_0_max
-    # - result_latency_reverse_pdr_0_min
-    # - result_latency_reverse_pdr_0_unit
-    # - result_latency_forward_pdr_90_avg
-    ## - result_latency_forward_pdr_90_hdrh
-    # - result_latency_forward_pdr_90_max
-    # - result_latency_forward_pdr_90_min
-    # - result_latency_forward_pdr_90_unit
     - result_latency_forward_pdr_50_avg
     - result_latency_forward_pdr_50_avg
-    ## - result_latency_forward_pdr_50_hdrh
-    # - result_latency_forward_pdr_50_max
-    # - result_latency_forward_pdr_50_min
     - result_latency_forward_pdr_50_unit
     - result_latency_forward_pdr_50_unit
-    # - result_latency_forward_pdr_10_avg
-    ## - result_latency_forward_pdr_10_hdrh
-    # - result_latency_forward_pdr_10_max
-    # - result_latency_forward_pdr_10_min
-    # - result_latency_forward_pdr_10_unit
-    # - result_latency_forward_pdr_0_avg
-    ## - result_latency_forward_pdr_0_hdrh
-    # - result_latency_forward_pdr_0_max
-    # - result_latency_forward_pdr_0_min
-    # - result_latency_forward_pdr_0_unit
 # coverage-ndrpdr:
 #   path: str
 #   columns:
 # coverage-ndrpdr:
 #   path: str
 #   columns:
-#     - list
\ No newline at end of file
+#     - list
index 22cd034..9307015 100644 (file)
@@ -24,8 +24,20 @@ from binascii import Error as BinasciiErr
 
 
 def url_encode(params: dict) -> str:
 
 
 def url_encode(params: dict) -> str:
+    """Encode the URL parameters and zip them and create the whole URL using
+    given data.
+
+    :param params: All data necessary to create the URL:
+        - scheme,
+        - network location,
+        - path,
+        - query,
+        - parameters.
+    :type params: dict
+    :returns: Encoded URL.
+    :rtype: str
     """
     """
-    """
+
     url_params = params.get("params", None)
     if url_params:
         encoded_params = urlsafe_b64encode(
     url_params = params.get("params", None)
     if url_params:
         encoded_params = urlsafe_b64encode(
@@ -45,8 +57,14 @@ def url_encode(params: dict) -> str:
 
 
 def url_decode(url: str) -> dict:
 
 
 def url_decode(url: str) -> dict:
+    """Parse the given URL and decode the parameters.
+
+    :param url: URL to be parsed and decoded.
+    :type url: str
+    :returns: Parsed URL.
+    :rtype: dict
     """
     """
-    """
+
     try:
         parsed_url = urlparse(url)
     except ValueError as err:
     try:
         parsed_url = urlparse(url)
     except ValueError as err:
index c34575b..b8edb7a 100644 (file)
@@ -31,20 +31,32 @@ from .tables import table_failed
 
 
 class Layout:
 
 
 class Layout:
-    """
+    """The layout of the dash app and the callbacks.
     """
 
     """
 
+    # The default job displayed when the page is loaded first time.
     DEFAULT_JOB = "csit-vpp-perf-mrr-daily-master-2n-icx"
 
     DEFAULT_JOB = "csit-vpp-perf-mrr-daily-master-2n-icx"
 
-    URL_STYLE = {
-        "background-color": "#d2ebf5",
-        "border-color": "#bce1f1",
-        "color": "#135d7c"
-    }
-
-    def __init__(self, app: Flask, html_layout_file: str,
-        data_spec_file: str, tooltip_file: str) -> None:
-        """
+    def __init__(self, app: Flask, html_layout_file: str, data_spec_file: str,
+        tooltip_file: str) -> None:
+        """Initialization:
+        - save the input parameters,
+        - read and pre-process the data,
+        - prepare data for the control panel,
+        - read HTML layout file,
+        - read tooltips from the tooltip file.
+
+        :param app: Flask application running the dash application.
+        :param html_layout_file: Path and name of the file specifying the HTML
+            layout of the dash application.
+        :param data_spec_file: Path and name of the file specifying the data to
+            be read from parquets for this application.
+        :param tooltip_file: Path and name of the yaml file specifying the
+            tooltips.
+        :type app: Flask
+        :type html_layout_file: str
+        :type data_spec_file: str
+        :type tooltip_file: str
         """
 
         # Inputs
         """
 
         # Inputs
@@ -61,6 +73,7 @@ class Layout:
 
         df_tst_info = pd.concat([data_mrr, data_ndrpdr], ignore_index=True)
 
 
         df_tst_info = pd.concat([data_mrr, data_ndrpdr], ignore_index=True)
 
+        # Prepare information for the control panel:
         jobs = sorted(list(df_tst_info["job"].unique()))
         job_info = {
             "job": list(),
         jobs = sorted(list(df_tst_info["job"].unique()))
         job_info = {
             "job": list(),
@@ -80,6 +93,7 @@ class Layout:
 
         self._default = self._set_job_params(self.DEFAULT_JOB)
 
 
         self._default = self._set_job_params(self.DEFAULT_JOB)
 
+        # Pre-process the data:
         tst_info = {
             "job": list(),
             "build": list(),
         tst_info = {
             "job": list(),
             "build": list(),
@@ -118,7 +132,7 @@ class Layout:
         self._data = pd.DataFrame.from_dict(tst_info)
 
         # Read from files:
         self._data = pd.DataFrame.from_dict(tst_info)
 
         # Read from files:
-        self._html_layout = ""
+        self._html_layout = str()
         self._tooltips = dict()
 
         try:
         self._tooltips = dict()
 
         try:
@@ -157,23 +171,41 @@ class Layout:
         return self._data
 
     @property
         return self._data
 
     @property
-    def default(self) -> any:
+    def default(self) -> dict:
         return self._default
 
     def _get_duts(self) -> list:
         return self._default
 
     def _get_duts(self) -> list:
-        """
+        """Get the list of DUTs from the pre-processed information about jobs.
+
+        :returns: Alphabetically sorted list of DUTs.
+        :rtype: list
         """
         return sorted(list(self.df_job_info["dut"].unique()))
 
     def _get_ttypes(self, dut: str) -> list:
         """
         return sorted(list(self.df_job_info["dut"].unique()))
 
     def _get_ttypes(self, dut: str) -> list:
-        """
+        """Get the list of test types from the pre-processed information about
+        jobs.
+
+        :param dut: The DUT for which the list of test types will be populated.
+        :type dut: str
+        :returns: Alphabetically sorted list of test types.
+        :rtype: list
         """
         return sorted(list(self.df_job_info.loc[(
             self.df_job_info["dut"] == dut
         )]["ttype"].unique()))
 
     def _get_cadences(self, dut: str, ttype: str) -> list:
         """
         return sorted(list(self.df_job_info.loc[(
             self.df_job_info["dut"] == dut
         )]["ttype"].unique()))
 
     def _get_cadences(self, dut: str, ttype: str) -> list:
-        """
+        """Get the list of cadences from the pre-processed information about
+        jobs.
+
+        :param dut: The DUT for which the list of cadences will be populated.
+        :param ttype: The test type for which the list of cadences will be
+            populated.
+        :type dut: str
+        :type ttype: str
+        :returns: Alphabeticaly sorted list of cadences.
+        :rtype: list
         """
         return sorted(list(self.df_job_info.loc[(
             (self.df_job_info["dut"] == dut) &
         """
         return sorted(list(self.df_job_info.loc[(
             (self.df_job_info["dut"] == dut) &
@@ -181,7 +213,19 @@ class Layout:
         )]["cadence"].unique()))
 
     def _get_test_beds(self, dut: str, ttype: str, cadence: str) -> list:
         )]["cadence"].unique()))
 
     def _get_test_beds(self, dut: str, ttype: str, cadence: str) -> list:
-        """
+        """Get the list of test beds from the pre-processed information about
+        jobs.
+
+        :param dut: The DUT for which the list of test beds will be populated.
+        :param ttype: The test type for which the list of test beds will be
+            populated.
+        :param cadence: The cadence for which the list of test beds will be
+            populated.
+        :type dut: str
+        :type ttype: str
+        :type cadence: str
+        :returns: Alphabetically sorted list of test beds.
+        :rtype: list
         """
         return sorted(list(self.df_job_info.loc[(
             (self.df_job_info["dut"] == dut) &
         """
         return sorted(list(self.df_job_info.loc[(
             (self.df_job_info["dut"] == dut) &
@@ -190,9 +234,19 @@ class Layout:
         )]["tbed"].unique()))
 
     def _get_job(self, dut, ttype, cadence, testbed):
         )]["tbed"].unique()))
 
     def _get_job(self, dut, ttype, cadence, testbed):
-        """Get the name of a job defined by dut, ttype, cadence, testbed.
-
-        Input information comes from control panel.
+        """Get the name of a job defined by dut, ttype, cadence, test bed.
+        Input information comes from the control panel.
+
+        :param dut: The DUT for which the job name will be created.
+        :param ttype: The test type for which the job name will be created.
+        :param cadence: The cadence for which the job name will be created.
+        :param testbed: The test bed for which the job name will be created.
+        :type dut: str
+        :type ttype: str
+        :type cadence: str
+        :type testbed: str
+        :returns: Job name.
+        :rtype: str
         """
         return self.df_job_info.loc[(
             (self.df_job_info["dut"] == dut) &
         """
         return self.df_job_info.loc[(
             (self.df_job_info["dut"] == dut) &
@@ -201,9 +255,30 @@ class Layout:
             (self.df_job_info["tbed"] == testbed)
         )]["job"].item()
 
             (self.df_job_info["tbed"] == testbed)
         )]["job"].item()
 
-    def _set_job_params(self, job: str) -> dict:
+    @staticmethod
+    def _generate_options(opts: list) -> list:
+        """Return list of options for radio items in control panel. The items in
+        the list are dictionaries with keys "label" and "value".
+
+        :param opts: List of options (str) to be used for the generated list.
+        :type opts: list
+        :returns: List of options (dict).
+        :rtype: list
         """
         """
+        return [{"label": i, "value": i} for i in opts]
+
+    def _set_job_params(self, job: str) -> dict:
+        """Create a dictionary with all options and values for (and from) the
+        given job.
+
+        :param job: The name of job for and from which the dictionary will be
+            created.
+        :type job: str
+        :returns: Dictionary with all options and values for (and from) the
+            given job.
+        :rtype: dict
         """
         """
+
         lst_job = job.split("-")
         return {
             "job": job,
         lst_job = job.split("-")
         return {
             "job": job,
@@ -221,8 +296,22 @@ class Layout:
 
     def _show_tooltip(self, id: str, title: str,
             clipboard_id: str=None) -> list:
 
     def _show_tooltip(self, id: str, title: str,
             clipboard_id: str=None) -> list:
+        """Generate list of elements to display a text (e.g. a title) with a
+        tooltip and optionaly with Copy&Paste icon and the clipboard
+        functionality enabled.
+
+        :param id: Tooltip ID.
+        :param title: A text for which the tooltip will be displayed.
+        :param clipboard_id: If defined, a Copy&Paste icon is displayed and the
+            clipboard functionality is enabled.
+        :type id: str
+        :type title: str
+        :type clipboard_id: str
+        :returns: List of elements to display a text with a tooltip and
+            optionally with Copy&amp;Paste icon.
+        :rtype: list
         """
         """
-        """
+
         return [
             dcc.Clipboard(target_id=clipboard_id, title="Copy URL") \
                 if clipboard_id else str(),
         return [
             dcc.Clipboard(target_id=clipboard_id, title="Copy URL") \
                 if clipboard_id else str(),
@@ -243,8 +332,19 @@ class Layout:
         ]
 
     def add_content(self):
         ]
 
     def add_content(self):
+        """Top level method which generates the web page.
+
+        It generates:
+        - Store for user input data,
+        - Navigation bar,
+        - Main area with control panel and plotting area.
+
+        If no HTML layout is provided, an error message is displayed instead.
+
+        :returns: The HTML div with the whole page.
+        :rtype: html.Div
         """
         """
-        """
+
         if self.html_layout:
             return html.Div(
                 id="div-main",
         if self.html_layout:
             return html.Div(
                 id="div-main",
@@ -282,7 +382,11 @@ class Layout:
 
     def _add_navbar(self):
         """Add nav element with navigation panel. It is placed on the top.
 
     def _add_navbar(self):
         """Add nav element with navigation panel. It is placed on the top.
+
+        :returns: Navigation bar.
+        :rtype: dbc.NavbarSimple
         """
         """
+
         return dbc.NavbarSimple(
             id="navbarsimple-main",
             children=[
         return dbc.NavbarSimple(
             id="navbarsimple-main",
             children=[
@@ -303,8 +407,12 @@ class Layout:
         )
 
     def _add_ctrl_col(self) -> dbc.Col:
         )
 
     def _add_ctrl_col(self) -> dbc.Col:
-        """Add column with controls. It is placed on the left side.
+        """Add column with control panel. It is placed on the left side.
+
+        :returns: Column with the control panel.
+        :rtype: dbc.col
         """
         """
+
         return dbc.Col(
             id="col-controls",
             children=[
         return dbc.Col(
             id="col-controls",
             children=[
@@ -313,8 +421,12 @@ class Layout:
         )
 
     def _add_plotting_col(self) -> dbc.Col:
         )
 
     def _add_plotting_col(self) -> dbc.Col:
-        """Add column with plots and tables. It is placed on the right side.
+        """Add column with tables. It is placed on the right side.
+
+        :returns: Column with tables.
+        :rtype: dbc.col
         """
         """
+
         return dbc.Col(
             id="col-plotting-area",
             children=[
         return dbc.Col(
             id="col-plotting-area",
             children=[
@@ -328,7 +440,10 @@ class Layout:
         )
 
     def _add_ctrl_panel(self) -> dbc.Row:
         )
 
     def _add_ctrl_panel(self) -> dbc.Row:
-        """
+        """Add control panel.
+
+        :returns: Control panel.
+        :rtype: dbc.Row
         """
         return dbc.Row(
             id="row-ctrl-panel",
         """
         return dbc.Row(
             id="row-ctrl-panel",
@@ -419,7 +534,13 @@ class Layout:
         )
 
     class ControlPanel:
         )
 
     class ControlPanel:
+        """
+        """
+
         def __init__(self, panel: dict, default: dict) -> None:
         def __init__(self, panel: dict, default: dict) -> None:
+            """
+            """
+
             self._defaults = {
                 "ri-ttypes-options": default["ttypes"],
                 "ri-cadences-options": default["cadences"],
             self._defaults = {
                 "ri-ttypes-options": default["ttypes"],
                 "ri-cadences-options": default["cadences"],
@@ -456,10 +577,6 @@ class Layout:
         def values(self) -> list:
             return list(self._panel.values())
 
         def values(self) -> list:
             return list(self._panel.values())
 
-    @staticmethod
-    def _generate_options(opts: list) -> list:
-        return [{"label": i, "value": i} for i in opts]
-
     def callbacks(self, app):
 
         @app.callback(
     def callbacks(self, app):
 
         @app.callback(
index 63bbb30..d4cd88f 100644 (file)
@@ -18,13 +18,13 @@ from flask import current_app as app
 from flask import render_template
 
 
 from flask import render_template
 
 
-@app.route(u"/")
+@app.route("/")
 def home():
     """Landing page.
     """
     return render_template(
 def home():
     """Landing page.
     """
     return render_template(
-        u"index_layout.jinja2",
-        title=u"FD.io CSIT",
-        description=u"Performance Dashboard",
-        template=u"d-flex h-100 text-center text-white bg-dark"
+        "index_layout.jinja2",
+        title="FD.io CSIT",
+        description="Performance Dashboard",
+        template="d-flex h-100 text-center text-white bg-dark"
     )
     )