PAL: Convert XML to JSON
[csit.git] / resources / tools / presentation / input_data_files.py
index 237002f..5bd6af4 100644
@@ -1,4 +1,4 @@
-# Copyright (c) 2020 Cisco and/or its affiliates.
+# Copyright (c) 2021 Cisco and/or its affiliates.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at:
@@ -21,7 +21,7 @@ import gzip
 
 from os import rename, mkdir
 from os.path import join
-from http.client import responses
+from http.client import responses, HTTPException
 from zipfile import ZipFile, is_zipfile, BadZipfile
 
 import requests
@@ -30,7 +30,7 @@ from requests.adapters import HTTPAdapter, Retry
 from requests.exceptions import RequestException
 from requests import codes
 
-from pal_errors import PresentationError
+from urllib3.exceptions import HTTPError
 
 
 # Chunk size used for file download
@@ -42,15 +42,19 @@ SEPARATOR = u"__"
 REGEX_RELEASE = re.compile(r'(\D*)(\d{4}|master)(\D*)')
 
 
-def _download_file(url, file_name, arch=False):
+def _download_file(url, file_name, arch=False, verify=True, repeat=1):
     """Download a file with input data.
 
     :param url: URL to the file to download.
     :param file_name: Name of file to download.
-    :param arch: If True, also .gz file is downloaded
+    :param arch: If True, the .gz file is also downloaded.
+    :param verify: If True, verify the server certificate.
+    :param repeat: The number of attempts to download the file.
     :type url: str
     :type file_name: str
     :type arch: bool
+    :type verify: bool
+    :type repeat: int
     :returns: True if the download was successful, otherwise False.
     :rtype: bool
     """
@@ -86,58 +90,54 @@ def _download_file(url, file_name, arch=False):
         return session
 
     success = False
-    session = None
-    try:
-        logging.info(f"    Connecting to {url} ...")
-        session = requests_retry_session()
-        response = session.get(url, stream=True, verify=False)
-        code = response.status_code
-        logging.info(f"    {code}: {responses[code]}")
-
-        if code != codes[u"OK"]:
-            if session:
-                session.close()
-            url = url.replace(u"_info", u"")
-            logging.info(f"    Connecting to {url} ...")
+    while repeat:
+        repeat -= 1
+        session = None
+        try:
+            logging.info(f"  Connecting to {url} ...")
             session = requests_retry_session()
-            response = session.get(url, stream=True, verify=False)
+            response = session.get(url, stream=True, verify=verify)
             code = response.status_code
-            logging.info(f"    {code}: {responses[code]}")
+            logging.info(f"  {code}: {responses[code]}")
+
             if code != codes[u"OK"]:
+                if session:
+                    session.close()
                 return False, file_name
-            file_name = file_name.replace(u"_info", u"")
-
-        dst_file_name = file_name.replace(u".gz", u"")
-        logging.info(f"    Downloading the file {url} to {dst_file_name} ...")
-        with open(dst_file_name, u"wb") as file_handle:
-            for chunk in response.iter_content(chunk_size=CHUNK_SIZE):
-                if chunk:
-                    file_handle.write(chunk)
 
-        if arch and u".gz" in file_name:
+            dst_file_name = file_name.replace(u".gz", u"")
+            logging.info(f"  Downloading the file {url} to {dst_file_name}")
+            with open(dst_file_name, u"wb") as file_handle:
+                for chunk in response.iter_content(chunk_size=CHUNK_SIZE):
+                    if chunk:
+                        file_handle.write(chunk)
+
+            if arch and u".gz" in file_name:
+                if session:
+                    session.close()
+                logging.info(f"  Downloading the file {url} to {file_name}")
+                session = requests_retry_session()
+                response = session.get(url, stream=True, verify=verify)
+                if response.status_code == codes[u"OK"]:
+                    with open(file_name, u"wb") as file_handle:
+                        file_handle.write(response.raw.read())
+                else:
+                    logging.error(
+                        f"Not possible to download the file "
+                        f"{url} to {file_name}"
+                    )
+
+            success = True
+            repeat = 0
+        except (HTTPException, HTTPError) as err:
+            logging.error(f"Connection broken:\n{repr(err)}")
+        except RequestException as err:
+            logging.error(f"HTTP Request exception:\n{repr(err)}")
+        except (IOError, ValueError, KeyError) as err:
+            logging.error(f"Download failed.\n{repr(err)}")
+        finally:
             if session:
                 session.close()
-            logging.info(f"    Downloading the file {url} to {file_name} ...")
-            session = requests_retry_session()
-            response = session.get(url, stream=True, verify=False)
-            if response.status_code == codes[u"OK"]:
-                with open(file_name, u"wb") as file_handle:
-                    file_handle.write(response.raw.read())
-            else:
-                logging.error(
-                    f"Not possible to download the file {url} to {file_name}"
-                )
-
-        success = True
-    except RequestException as err:
-        logging.error(f"HTTP Request exception:\n{repr(err)}")
-    except (IOError, ValueError, KeyError) as err:
-        logging.error(f"Download failed.\n{repr(err)}")
-    finally:
-        if session:
-            session.close()
-
-    logging.info(u"    Download finished.")
     return success, file_name
 
 
@@ -153,11 +153,7 @@ def _unzip_file(spec, build, pid):
     """
 
     file_name = build[u"file-name"]
-    if u".zip" in file_name:
-        data_file = spec.input[u"zip-extract"]
-    else:
-        data_file = spec.input[u"extract"]
-
+    data_file = u"robot-plugin/output.xml"
     directory = spec.environment[u"paths"][u"DIR[WORKING,DATA]"]
     tmp_dir = join(directory, str(pid))
     try:
@@ -185,127 +181,132 @@ def _unzip_file(spec, build, pid):
         return False
 
 
-def download_and_unzip_data_file(spec, job, build, pid):
-    """Download and unzip a source file.
+def _download_xml(source, job, build, w_dir, arch):
+    """
 
-    :param spec: Specification read form the specification file.
-    :param job: Name of the Jenkins job.
-    :param build: Information about the build.
-    :param pid: PID of the process executing this method.
-    :type spec: Specification
-    :type job: str
-    :type build: dict
-    :type pid: int
-    :returns: True if the download was successful, otherwise False.
-    :rtype: bool
+    :param source: Specification of the data source (url, path, file name).
+    :param job: Name of the Jenkins job.
+    :param build: Information about the build.
+    :param w_dir: Path to the working directory.
+    :param arch: If True, the .gz file is also downloaded.
+    :returns: Tuple of (success, name of the downloaded file).
     """
 
-    # Try to download .gz from s3_storage
-    file_name = spec.input[u"file-name"]
+    file_name = source.get(u"file-name", u"")
+    new_name = join(
+        w_dir,
+        f"{job}{SEPARATOR}{build[u'build']}{SEPARATOR}{file_name}"
+    )
     url = u"{0}/{1}".format(
-        spec.environment[u'urls'][u'URL[S3_STORAGE,LOG]'],
-        spec.input[u'download-path'].format(
+        source.get(u"url", u""),
+        source.get(u"path", u"").format(
             job=job, build=build[u'build'], filename=file_name
         )
     )
-    new_name = join(
-        spec.environment[u"paths"][u"DIR[WORKING,DATA]"],
-        f"{job}{SEPARATOR}{build[u'build']}{SEPARATOR}{file_name}"
+    logging.info(f"  Trying to download {url}")
+    success, downloaded_name = _download_file(
+        url, new_name, arch=arch, verify=(u"nginx" not in url), repeat=3
     )
+    return success, downloaded_name
 
-    logging.info(f"Trying to download {url}")
 
-    arch = bool(spec.configuration.get(u"archive-inputs", True))
-    success, downloaded_name = _download_file(url, new_name, arch=arch)
+def _download_xml_docs(source, job, build, w_dir, arch):
+    """
+
+    :param source:
+    :param job:
+    :param build:
+    :param w_dir: Path to working directory
+    :param arch:
+    :return:
+    """
 
-    if not success:
-        # Try to download .gz from logs.fd.io
-        file_name = spec.input[u"file-name"]
-        url = u"{0}/{1}".format(
-            spec.environment[u'urls'][u'URL[NEXUS,LOG]'],
-            spec.input[u'download-path'].format(
-                job=job, build=build[u'build'], filename=file_name
-            )
+    file_name = source.get(u"file-name", u"")
+    release = re.search(REGEX_RELEASE, job).group(2)
+    for rls in (release, u"master"):
+        try:
+            rls = f"rls{int(rls)}"
+        except ValueError:
+            pass  # It is master
+        url = (
+            f"{source.get(u'url', u'')}/"
+            f"{rls}/"
+            f"{source.get(u'path', u'')}/"
+            f"{job}{SEPARATOR}{build[u'build']}{SEPARATOR}{file_name}"
         )
         new_name = join(
-            spec.environment[u"paths"][u"DIR[WORKING,DATA]"],
+            w_dir,
             f"{job}{SEPARATOR}{build[u'build']}{SEPARATOR}{file_name}"
         )
 
-        logging.info(f"Trying to download {url}")
+        logging.info(f"  Trying to download {url}")
 
-        arch = bool(spec.configuration.get(u"archive-inputs", True))
         success, downloaded_name = _download_file(url, new_name, arch=arch)
+        if success:
+            if file_name.endswith(u".gz"):
+                with gzip.open(downloaded_name[:-3], u"rb") as gzip_file:
+                    file_content = gzip_file.read()
+                with open(downloaded_name[:-3], u"wb") as xml_file:
+                    xml_file.write(file_content)
+            break
 
-    if not success:
-
-        # Try to download .gz or .zip from docs.fd.io
-        file_name = (spec.input[u"file-name"], spec.input[u"zip-file-name"])
-        release = re.search(REGEX_RELEASE, job).group(2)
-        for idx, rls in enumerate((release, u"master", )):
-            try:
-                rls = f"rls{int(rls)}"
-            except ValueError:
-                # It is master
-                pass
-            url = (
-                f"{spec.environment[u'urls'][u'URL[NEXUS,DOC]']}/"
-                f"{rls}/"
-                f"{spec.environment[u'urls'][u'DIR[NEXUS,DOC]']}/"
-                f"{job}{SEPARATOR}{build[u'build']}{SEPARATOR}{file_name[idx]}"
-            )
-
-            logging.info(f"Downloading {url}")
-
-            new_name = join(
-                spec.environment[u"paths"][u"DIR[WORKING,DATA]"],
-                f"{job}{SEPARATOR}{build[u'build']}{SEPARATOR}{file_name[idx]}"
-            )
-            success, downloaded_name = _download_file(url, new_name, arch=arch)
-            if success:
-                file_name = file_name[idx]
-                if file_name.endswith(u".gz"):
-                    with gzip.open(downloaded_name[:-3], u"rb") as gzip_file:
-                        file_content = gzip_file.read()
-                    with open(downloaded_name[:-3], u"wb") as xml_file:
-                        xml_file.write(file_content)
-                break
-
-    if not success:
-
-        # Try to download .zip from jenkins.fd.io
-        file_name = spec.input[u"zip-file-name"]
-        download_path = spec.input[u"zip-download-path"]
-        if job.startswith(u"csit-"):
-            url = spec.environment[u"urls"][u"URL[JENKINS,CSIT]"]
-        else:
-            raise PresentationError(f"No url defined for the job {job}.")
-
-        full_name = download_path.format(
-            job=job, build=build[u"build"], filename=file_name
-        )
-        url = u"{0}/{1}".format(url, full_name)
-        new_name = join(
-            spec.environment[u"paths"][u"DIR[WORKING,DATA]"],
-            f"{job}{SEPARATOR}{build[u'build']}{SEPARATOR}{file_name}"
-        )
+    return success, downloaded_name
+
+
+def download_and_unzip_data_file(spec, job, build, pid):
+    """Download and unzip a source file.
+
+    :param spec: Specification read from the specification file.
+    :param job: Name of the Jenkins job.
+    :param build: Information about the build.
+    :param pid: PID of the process executing this method.
+    :type spec: Specification
+    :type job: str
+    :type build: dict
+    :type pid: int
+    :returns: True if the download was successful, otherwise False.
+    :rtype: bool
+    """
 
-        logging.info(f"Downloading {url}")
+    download = {
+        "xml": _download_xml,
+        "xml-docs": _download_xml_docs
+    }
 
-        success, downloaded_name = _download_file(url, new_name)
+    success = False
+    downloaded_name = u""
+    arch = bool(spec.environment.get(u"archive-inputs", True))
+
+    for source in spec.environment.get(u"data-sources", tuple()):
+        if not source.get(u"enabled", False):
+            continue
+        download_type = source.get(u"type", None)
+        if not download_type:
+            continue
+        success, downloaded_name = download[download_type](
+            source,
+            job,
+            build,
+            spec.environment[u"paths"][u"DIR[WORKING,DATA]"],
+            arch
+        )
+        if success:
+            source[u"successful-downloads"] += 1
+            build[u"source"] = source[u"type"]
+            break
 
+    # TODO: Remove when only .gz is used.
     if success and downloaded_name.endswith(u".zip"):
         if not is_zipfile(downloaded_name):
-            logging.error(f"Zip file {new_name} is corrupted.")
+            logging.error(f"Zip file {downloaded_name} is corrupted.")
             success = False
 
     if success:
-        build[u"file-name"] = downloaded_name
-
-        if file_name.endswith(u".gz"):
+        if downloaded_name.endswith(u".gz"):
             build[u"file-name"] = downloaded_name[:-3]
-
-        if downloaded_name.endswith(u".zip"):
+        # TODO: Remove when only .gz is used.
+        elif downloaded_name.endswith(u".zip"):
+            build[u"file-name"] = downloaded_name
             success = _unzip_file(spec, build, pid)
 
     return success
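
For reference, a minimal sketch (not part of the commit) of how a "data-sources" entry consumed by the new download_and_unzip_data_file() could look, and how _download_xml() composes the download URL from it. The dictionary keys (enabled, type, url, path, file-name, successful-downloads) are the ones read by the new code; the URL, path, job and build values below are purely illustrative.

    # Illustrative sketch only: keys mirror those read by the new code,
    # values (URL, path, job, build) are hypothetical examples.
    SEPARATOR = u"__"

    source = {
        u"enabled": True,
        u"type": u"xml",                      # selects _download_xml in the dispatch dict
        u"url": u"https://logs.example.org",  # hypothetical log/S3 server
        u"path": u"{job}/{build}/archives/{filename}",
        u"file-name": u"output_info.xml.gz",
        u"successful-downloads": 0,
    }
    job = u"csit-vpp-perf-report-iterative-2101-3n-skx"  # example job name
    build = {u"build": 42}                               # example build number

    # URL composition as done in _download_xml():
    file_name = source.get(u"file-name", u"")
    url = u"{0}/{1}".format(
        source.get(u"url", u""),
        source.get(u"path", u"").format(
            job=job, build=build[u"build"], filename=file_name
        )
    )
    # Local file name created in DIR[WORKING,DATA]:
    new_name = f"{job}{SEPARATOR}{build[u'build']}{SEPARATOR}{file_name}"

    print(url)       # https://logs.example.org/csit-.../42/archives/output_info.xml.gz
    print(new_name)  # csit-...__42__output_info.xml.gz

Adding a new source type then only requires another downloader entry in the "download" dispatch dictionary inside download_and_unzip_data_file().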