PAL: repeat the download if it fails 2
[csit.git] / resources/tools/presentation/input_data_files.py
index 8b941f2..e5b26e9 100644
@@ -21,7 +21,7 @@ import gzip
 
 from os import rename, mkdir
 from os.path import join
-from http.client import responses
+from http.client import responses, HTTPException
 from zipfile import ZipFile, is_zipfile, BadZipfile
 
 import requests
@@ -30,6 +30,8 @@ from requests.adapters import HTTPAdapter, Retry
 from requests.exceptions import RequestException
 from requests import codes
 
+from urllib3.exceptions import HTTPError
+
 from pal_errors import PresentationError
 
 
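The two new imports widen the exception handling: besides requests.exceptions.RequestException, errors raised by the lower layers (urllib3's HTTPError, http.client's HTTPException) can escape while a streamed response body is being read, and they are not always wrapped by requests. A minimal sketch of that catch order, with a hypothetical fetch() helper standing in for the real download code:

import logging

from http.client import HTTPException

import requests
from requests.exceptions import RequestException
from urllib3.exceptions import HTTPError


def fetch(url):
    """Hypothetical helper: stream a response body into memory."""
    with requests.get(url, stream=True, timeout=10) as response:
        response.raise_for_status()
        return b"".join(response.iter_content(chunk_size=4096))


def fetch_or_none(url):
    """Return the body, or None if any layer of the HTTP stack fails."""
    try:
        return fetch(url)
    except (HTTPException, HTTPError) as err:
        # Low-level errors escaping http.client / urllib3, e.g. a broken
        # connection while the streamed body is being read.
        logging.error(f"Connection broken:\n{repr(err)}")
    except RequestException as err:
        # Errors raised and wrapped by requests itself.
        logging.error(f"HTTP Request exception:\n{repr(err)}")
    return None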
@@ -42,15 +44,19 @@ SEPARATOR = u"__"
 REGEX_RELEASE = re.compile(r'(\D*)(\d{4}|master)(\D*)')
 
 
-def _download_file(url, file_name, arch=False):
+def _download_file(url, file_name, arch=False, verify=True, repeat=1):
     """Download a file with input data.
 
     :param url: URL to the file to download.
     :param file_name: Name of file to download.
-    :param arch: If True, also .gz file is downloaded
+    :param arch: If True, also .gz file is downloaded.
+    :param verify: If True, verify the server's certificate.
+    :param repeat: The number of attempts to download the file.
     :type url: str
     :type file_name: str
     :type arch: bool
+    :type verify: bool
+    :type repeat: int
     :returns: True if the download was successful, otherwise False.
     :rtype: bool
     """
@@ -86,56 +92,62 @@ def _download_file(url, file_name, arch=False):
         return session
 
     success = False
-    session = None
-    try:
-        logging.info(f"    Connecting to {url} ...")
-        session = requests_retry_session()
-        response = session.get(url, stream=True)
-        code = response.status_code
-        logging.info(f"    {code}: {responses[code]}")
-
-        if code != codes[u"OK"]:
-            if session:
-                session.close()
-            url = url.replace(u"_info", u"")
+    while repeat:
+        repeat -= 1
+        session = None
+        try:
             logging.info(f"    Connecting to {url} ...")
             session = requests_retry_session()
-            response = session.get(url, stream=True)
+            response = session.get(url, stream=True, verify=verify)
             code = response.status_code
             logging.info(f"    {code}: {responses[code]}")
-            if code != codes[u"OK"]:
-                return False, file_name
-            file_name = file_name.replace(u"_info", u"")
 
-        dst_file_name = file_name.replace(u".gz", u"")
-        logging.info(f"    Downloading the file {url} to {dst_file_name} ...")
-        with open(dst_file_name, u"wb") as file_handle:
-            for chunk in response.iter_content(chunk_size=CHUNK_SIZE):
-                if chunk:
-                    file_handle.write(chunk)
-
-        if arch and u".gz" in file_name:
+            if code != codes[u"OK"]:
+                if session:
+                    session.close()
+                url = url.replace(u"_info", u"")
+                logging.info(f"    Connecting to {url} ...")
+                session = requests_retry_session()
+                response = session.get(url, stream=True, verify=verify)
+                code = response.status_code
+                logging.info(f"    {code}: {responses[code]}")
+                if code != codes[u"OK"]:
+                    return False, file_name
+                file_name = file_name.replace(u"_info", u"")
+
+            dst_file_name = file_name.replace(u".gz", u"")
+            logging.info(f"    Downloading the file {url} to {dst_file_name}")
+            with open(dst_file_name, u"wb") as file_handle:
+                for chunk in response.iter_content(chunk_size=CHUNK_SIZE):
+                    if chunk:
+                        file_handle.write(chunk)
+
+            if arch and u".gz" in file_name:
+                if session:
+                    session.close()
+                logging.info(f"    Downloading the file {url} to {file_name}")
+                session = requests_retry_session()
+                response = session.get(url, stream=True, verify=verify)
+                if response.status_code == codes[u"OK"]:
+                    with open(file_name, u"wb") as file_handle:
+                        file_handle.write(response.raw.read())
+                else:
+                    logging.error(
+                        f"Not possible to download the file "
+                        f"{url} to {file_name}"
+                    )
+
+            success = True
+            repeat = 0
+        except (HTTPException, HTTPError) as err:
+            logging.error(f"Connection broken:\n{repr(err)}")
+        except RequestException as err:
+            logging.error(f"HTTP Request exception:\n{repr(err)}")
+        except (IOError, ValueError, KeyError) as err:
+            logging.error(f"Download failed.\n{repr(err)}")
+        finally:
             if session:
                 session.close()
-            logging.info(f"    Downloading the file {url} to {file_name} ...")
-            session = requests_retry_session()
-            response = session.get(url, stream=True)
-            if response.status_code == codes[u"OK"]:
-                with open(file_name, u"wb") as file_handle:
-                    file_handle.write(response.raw.read())
-            else:
-                logging.error(
-                    f"Not possible to download the file {url} to {file_name}"
-                )
-
-        success = True
-    except RequestException as err:
-        logging.error(f"HTTP Request exception:\n{repr(err)}")
-    except (IOError, ValueError, KeyError) as err:
-        logging.error(f"Download failed.\n{repr(err)}")
-    finally:
-        if session:
-            session.close()
 
     logging.info(u"    Download finished.")
     return success, file_name
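The retry loop above still goes through requests_retry_session(), whose body lies outside the hunk. A common shape for such a helper is sketched below; the retry counts, back-off factor and status list are placeholders, not values taken from the patch:

import requests
from requests.adapters import HTTPAdapter, Retry


def requests_retry_session(retries=3, backoff_factor=0.3,
                           status_forcelist=(500, 502, 504)):
    """Session whose adapters retry transient failures per request."""
    session = requests.Session()
    retry = Retry(
        total=retries,
        connect=retries,
        read=retries,
        backoff_factor=backoff_factor,
        status_forcelist=status_forcelist,
    )
    adapter = HTTPAdapter(max_retries=retry)
    session.mount("http://", adapter)
    session.mount("https://", adapter)
    return session

These adapter-level retries cover connection problems and the listed status codes within one session.get() call; the new while loop in _download_file() adds a second, coarser layer that restarts the whole download when an exception slips through.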
@@ -200,11 +212,10 @@ def download_and_unzip_data_file(spec, job, build, pid):
     :rtype: bool
     """
 
-    # Try to download .gz from logs.fd.io
-
+    # Try to download .gz from s3_storage
     file_name = spec.input[u"file-name"]
     url = u"{0}/{1}".format(
-        spec.environment[u'urls'][u'URL[NEXUS,LOG]'],
+        spec.environment[u'urls'][u'URL[S3_STORAGE,LOG]'],
         spec.input[u'download-path'].format(
             job=job, build=build[u'build'], filename=file_name
         )
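The primary source of the .gz files moves from the Nexus log server to the S3 storage URL. How the pieces visible in the hunk combine, with made-up values standing in for the specification fields:

# Hypothetical values; the real ones come from the specification.
environment_urls = {u"URL[S3_STORAGE,LOG]": u"https://s3.example.org/logs"}
download_path = u"{job}/{build}/{filename}"

job = u"csit-example-job"
build = {u"build": 42}
file_name = u"output_info.xml.gz"

url = u"{0}/{1}".format(
    environment_urls[u"URL[S3_STORAGE,LOG]"],
    download_path.format(job=job, build=build[u"build"], filename=file_name)
)
# -> https://s3.example.org/logs/csit-example-job/42/output_info.xml.gz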
@@ -217,7 +228,30 @@ def download_and_unzip_data_file(spec, job, build, pid):
     logging.info(f"Trying to download {url}")
 
     arch = bool(spec.configuration.get(u"archive-inputs", True))
-    success, downloaded_name = _download_file(url, new_name, arch=arch)
+    success, downloaded_name = _download_file(
+        url, new_name, arch=arch, verify=False, repeat=3
+    )
+
+    if not success:
+        # Try to download .gz from logs.fd.io
+        file_name = spec.input[u"file-name"]
+        url = u"{0}/{1}".format(
+            spec.environment[u'urls'][u'URL[NEXUS,LOG]'],
+            spec.input[u'download-path'].format(
+                job=job, build=build[u'build'], filename=file_name
+            )
+        )
+        new_name = join(
+            spec.environment[u"paths"][u"DIR[WORKING,DATA]"],
+            f"{job}{SEPARATOR}{build[u'build']}{SEPARATOR}{file_name}"
+        )
+
+        logging.info(f"Trying to download {url}")
+
+        arch = bool(spec.configuration.get(u"archive-inputs", True))
+        success, downloaded_name = _download_file(
+            url, new_name, arch=arch, verify=True, repeat=3
+        )
 
     if not success:
 
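The resulting control flow tries the S3 location first, with certificate verification switched off, and only falls back to logs.fd.io (with verification on) when that fails. The same pattern, sketched as a hypothetical helper that is not part of the patch:

def try_sources(sources, download):
    """Hypothetical helper: try (url, verify) pairs in order.

    :param sources: Iterable of (url, verify) tuples, primary source first.
    :param download: Callable returning a (success, file_name) tuple,
        in the spirit of _download_file().
    :returns: Result of the first successful download, or (False, None).
    """
    for url, verify in sources:
        success, file_name = download(url, verify=verify)
        if success:
            return True, file_name
    return False, None

With such a helper the two nearly identical blocks would collapse into a list of sources; the patch keeps them explicit instead.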
@@ -260,8 +294,6 @@ def download_and_unzip_data_file(spec, job, build, pid):
         download_path = spec.input[u"zip-download-path"]
         if job.startswith(u"csit-"):
             url = spec.environment[u"urls"][u"URL[JENKINS,CSIT]"]
-        elif job.startswith(u"hc2vpp-"):
-            url = spec.environment[u"urls"][u"URL[JENKINS,HC]"]
         else:
             raise PresentationError(f"No url defined for the job {job}.")