def _download_xml(source, job, build, w_dir, arch):
    """Download one XML results file for the given job and build.

    The target file name is composed from the job name, the build number
    and the configured file name, joined by SEPARATOR, and stored in the
    working directory.

    :param source: Data source specification; the u"file-name", u"url"
        and u"path" items are read (empty string defaults).
    :param job: Jenkins job name.
    :param build: Build information; the u"build" item is used.
    :param w_dir: Path to working directory.
    :param arch: Flag passed through to _download_file (archive handling).
    :type source: dict
    :type job: str
    :type build: dict
    :type w_dir: str
    :type arch: bool
    :returns: Tuple (download succeeded, path to the downloaded file).
    :rtype: tuple(bool, str)
    """

    xml_name = source.get(u"file-name", u"")
    target = join(
        w_dir,
        f"{job}{SEPARATOR}{build[u'build']}{SEPARATOR}{xml_name}"
    )
    # Expand the configured path template with job / build / file name.
    remote_path = source.get(u"path", u"").format(
        job=job, build=build[u'build'], filename=xml_name
    )
    url = u"{0}/{1}".format(source.get(u"url", u""), remote_path)
    logging.info(f"  Trying to download {url}")
    # Certificate verification is deliberately skipped for nginx-served
    # URLs; three attempts are made before giving up.
    return _download_file(
        url, target, arch=arch, verify=(u"nginx" not in url), repeat=3
    )
+
+
def _download_xml_docs(source, job, build, w_dir, arch):
    """Download a job/build XML file from a release-structured docs server.

    The release directory is derived from the job name; if downloading
    from it fails, u"master" is tried as a fallback.

    :param source: Data source specification; the u"file-name", u"url"
        and u"path" items are read (empty string defaults).
    :param job: Jenkins job name; expected to match REGEX_RELEASE.
    :param build: Build information; the u"build" item is used.
    :param w_dir: Path to working directory.
    :param arch: Flag passed through to _download_file (archive handling).
    :type source: dict
    :type job: str
    :type build: dict
    :type w_dir: str
    :type arch: bool
    :returns: Tuple (download succeeded, path to the downloaded file).
    :rtype: tuple(bool, str)
    """

    file_name = source.get(u"file-name", u"")
    # NOTE(review): raises AttributeError if the job name does not match
    # REGEX_RELEASE -- presumably guaranteed by the caller; confirm.
    release = re.search(REGEX_RELEASE, job).group(2)
    for rls in (release, u"master"):
        try:
            # Numeric releases are addressed as e.g. u"rls2009".
            rls = f"rls{int(rls)}"
        except ValueError:
            pass  # It is master
        url = (
            f"{source.get(u'url', u'')}/"
            f"{rls}/"
            f"{source.get(u'path', u'')}/"
            f"{job}{SEPARATOR}{build[u'build']}{SEPARATOR}{file_name}"
        )
        new_name = join(
            w_dir,
            f"{job}{SEPARATOR}{build[u'build']}{SEPARATOR}{file_name}"
        )

        logging.info(f"  Trying to download {url}")

        success, downloaded_name = _download_file(url, new_name, arch=arch)
        if success:
            if file_name.endswith(u".gz"):
                # NOTE(review): the file at downloaded_name[:-3] is read as
                # gzip data and rewritten in place with its decompressed
                # content. This relies on _download_file's naming/unpacking
                # behaviour for arch downloads -- confirm it stores gzipped
                # data under the stripped (no ".gz") name.
                with gzip.open(downloaded_name[:-3], u"rb") as gzip_file:
                    file_content = gzip_file.read()
                with open(downloaded_name[:-3], u"wb") as xml_file:
                    xml_file.write(file_content)
            # Stop at the first release directory that works.
            break

    return success, downloaded_name
+
+