def test_index_search(responses, params, state, expires, expected):
    taskid = "abc"
    index_path = "foo.bar.latest"
    responses.add(
        responses.GET,
        f"https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/{index_path}",
        json={"taskId": taskid},
        status=200,
    )
    responses.add(
        responses.GET,
        f"https://firefox-ci-tc.services.mozilla.com/api/queue/v1/task/{taskid}/status",
        json={
            "status": {
                "state": state,
                "expires": expires,
            }
        },
        status=200,
    )

    opt = IndexSearch()
    deadline = "2021-06-07T19:03:20.482Z"
    assert opt.should_replace_task({}, params, deadline, (index_path,)) == expected
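The test above takes state, expires, and expected as arguments, which implies a pytest.mark.parametrize decorator not shown here. A minimal sketch of what that parametrization might look like, assuming IndexSearch.should_replace_task returns the indexed taskId when the task is reusable and False otherwise; the concrete tuples are illustrative, not taken from the original suite:

import pytest

# Hypothetical cases: a completed task that expires after the deadline is
# reusable (the optimizer returns its taskId); a failed task, or one that
# expires before the deadline, is not (it returns False).
INDEX_SEARCH_CASES = pytest.mark.parametrize(
    "state,expires,expected",
    (
        ("completed", "2021-06-08T19:03:20.482Z", "abc"),
        ("failed", "2021-06-08T19:03:20.482Z", False),
        ("completed", "2021-06-06T19:03:20.482Z", False),
    ),
)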
def load_image_by_name(image_name, tag=None):
    params = {'level': six.ensure_text(os.environ.get('MOZ_SCM_LEVEL', '3'))}
    tasks = load_tasks_for_kind(params, 'docker-image')
    task = tasks['build-docker-image-{}'.format(image_name)]
    task_id = IndexSearch().should_replace_task(
        task, {}, task.optimization.get('index-search', []))
    if task_id in (True, False):
        print('Could not find artifacts for a docker image '
              'named `{image_name}`. Local commits and other changes '
              'in your checkout may cause this error. Try '
              'updating to a fresh checkout of mozilla-central '
              'to download image.'.format(image_name=image_name))
        return False

    return load_image_by_task_id(task_id, tag)
def load_image_by_name(image_name, tag=None):
    params = {"level": six.ensure_text(os.environ.get("MOZ_SCM_LEVEL", "3"))}
    tasks = load_tasks_for_kind(params, "docker-image")
    task = tasks["docker-image-{}".format(image_name)]
    task_id = IndexSearch().should_replace_task(
        task, {}, task.optimization.get("index-search", []))
    if task_id in (True, False):
        print("Could not find artifacts for a docker image "
              "named `{image_name}`. Local commits and other changes "
              "in your checkout may cause this error. Try "
              "updating to a fresh checkout of mozilla-central "
              "to download image.".format(image_name=image_name))
        return False

    return load_image_by_task_id(task_id, tag)
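A hedged usage sketch for load_image_by_name above; the image name and tag are placeholders, and the function is assumed (per the code) to return False on failure and a truthy value on success:

import sys

# Pull the prebuilt image for a hypothetical "custom-v8" docker-image task
# and tag it locally; bail out with a non-zero exit on failure.
if not load_image_by_name("custom-v8", tag="custom-v8:latest"):
    sys.exit(1)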
def artifact_toolchain(self, verbose=False, cache_dir=None,
                       skip_cache=False, from_build=(),
                       tooltool_manifest=None, authentication_file=None,
                       no_unpack=False, retry=0, bootstrap=False,
                       artifact_manifest=None, files=()):
    '''Download, cache and install pre-built toolchains.
    '''
    from mozbuild.artifacts import ArtifactCache
    from mozbuild.action.tooltool import (
        FileRecord,
        open_manifest,
        unpack_file,
    )
    import redo
    import requests
    import time

    from taskgraph.util.taskcluster import (
        get_artifact_url,
    )

    start = time.time()
    self._set_log_level(verbose)
    # Normally, we'd use self.log_manager.enable_unstructured(),
    # but that enables all logging, while we only really want tooltool's
    # and it also makes structured log output twice.
    # So we manually do what it does, and limit that to the tooltool
    # logger.
    if self.log_manager.terminal_handler:
        logging.getLogger('mozbuild.action.tooltool').addHandler(
            self.log_manager.terminal_handler)
        logging.getLogger('redo').addHandler(
            self.log_manager.terminal_handler)
        self.log_manager.terminal_handler.addFilter(
            self.log_manager.structured_filter)
    if not cache_dir:
        cache_dir = os.path.join(self._mach_context.state_dir, 'toolchains')

    tooltool_host = os.environ.get('TOOLTOOL_HOST',
                                   'tooltool.mozilla-releng.net')
    taskcluster_proxy_url = os.environ.get('TASKCLUSTER_PROXY_URL')
    if taskcluster_proxy_url:
        tooltool_url = '{}/{}'.format(taskcluster_proxy_url, tooltool_host)
    else:
        tooltool_url = 'https://{}'.format(tooltool_host)

    cache = ArtifactCache(cache_dir=cache_dir, log=self.log,
                          skip_cache=skip_cache)

    class DownloadRecord(FileRecord):
        def __init__(self, url, *args, **kwargs):
            super(DownloadRecord, self).__init__(*args, **kwargs)
            self.url = url
            self.basename = self.filename

        def fetch_with(self, cache):
            self.filename = cache.fetch(self.url)
            return self.filename

        def validate(self):
            if self.size is None and self.digest is None:
                return True
            return super(DownloadRecord, self).validate()

    class ArtifactRecord(DownloadRecord):
        def __init__(self, task_id, artifact_name):
            for _ in redo.retrier(attempts=retry + 1, sleeptime=60):
                cot = cache._download_manager.session.get(
                    get_artifact_url(task_id, 'public/chain-of-trust.json'))
                if cot.status_code >= 500:
                    continue
                cot.raise_for_status()
                break
            else:
                cot.raise_for_status()

            digest = algorithm = None
            data = json.loads(cot.text)
            for algorithm, digest in (data.get('artifacts', {})
                                          .get(artifact_name, {}).items()):
                pass
            name = os.path.basename(artifact_name)
            artifact_url = get_artifact_url(
                task_id, artifact_name,
                use_proxy=not artifact_name.startswith('public/'))
            super(ArtifactRecord, self).__init__(
                artifact_url, name, None, digest, algorithm, unpack=True)

    records = OrderedDict()
    downloaded = []

    if tooltool_manifest:
        manifest = open_manifest(tooltool_manifest)
        for record in manifest.file_records:
            url = '{}/{}/{}'.format(tooltool_url, record.algorithm,
                                    record.digest)
            records[record.filename] = DownloadRecord(
                url, record.filename, record.size, record.digest,
                record.algorithm, unpack=record.unpack,
                version=record.version, visibility=record.visibility)

    if from_build:
        if 'MOZ_AUTOMATION' in os.environ:
            self.log(
                logging.ERROR, 'artifact', {},
                'Do not use --from-build in automation; all dependencies '
                'should be determined in the decision task.')
            return 1
        from taskgraph.optimize.strategies import IndexSearch
        from taskgraph.generator import load_tasks_for_kind
        params = {
            'level': six.ensure_text(os.environ.get('MOZ_SCM_LEVEL', '3'))
        }

        root_dir = mozpath.join(self.topsrcdir, 'taskcluster/ci')
        toolchains = load_tasks_for_kind(params, 'toolchain',
                                         root_dir=root_dir)

        aliases = {}
        for t in toolchains.values():
            alias = t.attributes.get('toolchain-alias')
            if alias:
                aliases['toolchain-{}'.format(alias)] = \
                    t.task['metadata']['name']

        for b in from_build:
            user_value = b

            if not b.startswith('toolchain-'):
                b = 'toolchain-{}'.format(b)

            task = toolchains.get(aliases.get(b, b))
            if not task:
                self.log(
                    logging.ERROR, 'artifact', {'build': user_value},
                    'Could not find a toolchain build named `{build}`')
                return 1

            # Ensure that toolchains installed by `mach bootstrap` have the
            # `local-toolchain` attribute set. Taskgraph ensures that these
            # are built on trunk projects, so the task will be available to
            # install here.
            if bootstrap and not task.attributes.get('local-toolchain'):
                self.log(
                    logging.ERROR, 'artifact', {'build': user_value},
                    'Toolchain `{build}` is not annotated as used for '
                    'local development.')
                return 1

            artifact_name = task.attributes.get('toolchain-artifact')
            self.log(
                logging.DEBUG, 'artifact',
                {'name': artifact_name,
                 'index': task.optimization.get('index-search')},
                'Searching for {name} in {index}')
            task_id = IndexSearch().should_replace_task(
                task, {}, task.optimization.get('index-search', []))

            if task_id in (True, False) or not artifact_name:
                self.log(logging.ERROR, 'artifact', {'build': user_value},
                         _COULD_NOT_FIND_ARTIFACTS_TEMPLATE)
                return 1

            self.log(logging.DEBUG, 'artifact',
                     {'name': artifact_name, 'task_id': task_id},
                     'Found {name} in {task_id}')

            record = ArtifactRecord(task_id, artifact_name)
            records[record.filename] = record

    # Handle the list of files of the form path@task-id on the command
    # line. Each of those gives a path to an artifact to download.
    for f in files:
        if '@' not in f:
            self.log(logging.ERROR, 'artifact', {},
                     'Expected a list of files of the form path@task-id')
            return 1
        name, task_id = f.rsplit('@', 1)
        record = ArtifactRecord(task_id, name)
        records[record.filename] = record

    for record in six.itervalues(records):
        self.log(logging.INFO, 'artifact', {'name': record.basename},
                 'Setting up artifact {name}')
        valid = False
        # sleeptime is 60 per retry.py, used by tooltool_wrapper.sh
        for attempt, _ in enumerate(
                redo.retrier(attempts=retry + 1, sleeptime=60)):
            try:
                record.fetch_with(cache)
            except (requests.exceptions.HTTPError,
                    requests.exceptions.ChunkedEncodingError,
                    requests.exceptions.ConnectionError) as e:

                if isinstance(e, requests.exceptions.HTTPError):
                    # The relengapi proxy likes to return error 400 bad
                    # request, which seems unlikely to be due to our
                    # (simple) GET being borked.
                    status = e.response.status_code
                    should_retry = status >= 500 or status == 400
                else:
                    should_retry = True

                if should_retry or attempt < retry:
                    level = logging.WARN
                else:
                    level = logging.ERROR
                self.log(level, 'artifact', {}, str(e))
                if not should_retry:
                    break
                if attempt < retry:
                    self.log(logging.INFO, 'artifact', {},
                             'Will retry in a moment...')
                continue
            try:
                valid = record.validate()
            except Exception:
                pass
            if not valid:
                os.unlink(record.filename)
                if attempt < retry:
                    self.log(
                        logging.INFO, 'artifact', {},
                        'Corrupt download. Will retry in a moment...')
                continue

            downloaded.append(record)
            break

        if not valid:
            self.log(logging.ERROR, 'artifact', {'name': record.basename},
                     'Failed to download {name}')
            return 1

    artifacts = {} if artifact_manifest else None

    for record in downloaded:
        local = os.path.join(os.getcwd(), record.basename)
        if os.path.exists(local):
            os.unlink(local)
        # unpack_file needs the file with its final name to work
        # (https://github.com/mozilla/build-tooltool/issues/38), so we
        # need to copy it, even though we remove it later. Use hard links
        # when possible.
        try:
            os.link(record.filename, local)
        except Exception:
            shutil.copy(record.filename, local)
        # Keep a sha256 of each downloaded file, for the chain-of-trust
        # validation.
        if artifact_manifest is not None:
            with open(local, 'rb') as fh:
                h = hashlib.sha256()
                while True:
                    data = fh.read(1024 * 1024)
                    if not data:
                        break
                    h.update(data)
            artifacts[record.url] = {
                'sha256': h.hexdigest(),
            }
        if record.unpack and not no_unpack:
            # Try to unpack the file. If we get an exception importing
            # zstandard when calling unpack_file, we can try installing
            # zstandard locally and trying again.
            try:
                unpack_file(local)
            except ImportError as e:
                # Need to do this branch while this code is still exercised
                # by Python 2.
                if six.PY3 and e.name != "zstandard":
                    raise
                elif six.PY2 and e.message != 'No module named zstandard':
                    raise
                self._ensure_zstd()
                unpack_file(local)
            os.unlink(local)

    if not downloaded:
        self.log(logging.ERROR, 'artifact', {}, 'Nothing to download')
        if files:
            return 1

    if artifacts:
        ensureParentDir(artifact_manifest)
        with open(artifact_manifest, 'w') as fh:
            json.dump(artifacts, fh, indent=4, sort_keys=True)

    if 'MOZ_AUTOMATION' in os.environ:
        end = time.time()

        perfherder_data = {
            'framework': {'name': 'build_metrics'},
            'suites': [{
                'name': 'mach_artifact_toolchain',
                'value': end - start,
                'lowerIsBetter': True,
                'shouldAlert': False,
                'subtests': [],
            }],
        }
        self.log(logging.INFO, 'perfherder',
                 {'data': json.dumps(perfherder_data)},
                 'PERFHERDER_DATA: {data}')

    return 0
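The download loops above are built on redo.retrier, a generator that yields once per attempt and sleeps between iterations. A minimal sketch of the pattern under that assumption; fetch_once is a placeholder for the real download:

import redo

def fetch_once():
    # Placeholder for the real download; raises on transient failure.
    raise IOError("transient failure")

for attempt, _ in enumerate(redo.retrier(attempts=3, sleeptime=1)):
    try:
        fetch_once()
    except IOError:
        # Fall through to the next iteration to retry after the sleep.
        continue
    break  # success: stop retrying
else:
    # The generator was exhausted without a break: every attempt failed.
    print("all attempts failed")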
def artifact_toolchain(
    self,
    verbose=False,
    cache_dir=None,
    skip_cache=False,
    from_build=(),
    tooltool_manifest=None,
    no_unpack=False,
    retry=0,
    bootstrap=False,
    artifact_manifest=None,
):
    """Download, cache and install pre-built toolchains."""
    from mozbuild.artifacts import ArtifactCache
    from mozbuild.action.tooltool import (
        FileRecord,
        open_manifest,
        unpack_file,
    )
    import redo
    import requests
    import time

    from taskgraph.util.taskcluster import get_artifact_url

    start = time.time()
    self._set_log_level(verbose)
    # Normally, we'd use self.log_manager.enable_unstructured(),
    # but that enables all logging, while we only really want tooltool's
    # and it also makes structured log output twice.
    # So we manually do what it does, and limit that to the tooltool
    # logger.
    if self.log_manager.terminal_handler:
        logging.getLogger("mozbuild.action.tooltool").addHandler(
            self.log_manager.terminal_handler
        )
        logging.getLogger("redo").addHandler(self.log_manager.terminal_handler)
        self.log_manager.terminal_handler.addFilter(
            self.log_manager.structured_filter
        )
    if not cache_dir:
        cache_dir = os.path.join(self._mach_context.state_dir, "toolchains")

    tooltool_host = os.environ.get("TOOLTOOL_HOST", "tooltool.mozilla-releng.net")
    taskcluster_proxy_url = os.environ.get("TASKCLUSTER_PROXY_URL")
    if taskcluster_proxy_url:
        tooltool_url = "{}/{}".format(taskcluster_proxy_url, tooltool_host)
    else:
        tooltool_url = "https://{}".format(tooltool_host)

    cache = ArtifactCache(cache_dir=cache_dir, log=self.log, skip_cache=skip_cache)

    class DownloadRecord(FileRecord):
        def __init__(self, url, *args, **kwargs):
            super(DownloadRecord, self).__init__(*args, **kwargs)
            self.url = url
            self.basename = self.filename

        def fetch_with(self, cache):
            self.filename = cache.fetch(self.url)
            return self.filename

        def validate(self):
            if self.size is None and self.digest is None:
                return True
            return super(DownloadRecord, self).validate()

    class ArtifactRecord(DownloadRecord):
        def __init__(self, task_id, artifact_name):
            for _ in redo.retrier(attempts=retry + 1, sleeptime=60):
                cot = cache._download_manager.session.get(
                    get_artifact_url(task_id, "public/chain-of-trust.json")
                )
                if cot.status_code >= 500:
                    continue
                cot.raise_for_status()
                break
            else:
                cot.raise_for_status()

            digest = algorithm = None
            data = json.loads(cot.text)
            for algorithm, digest in (
                data.get("artifacts", {}).get(artifact_name, {}).items()
            ):
                pass
            name = os.path.basename(artifact_name)
            artifact_url = get_artifact_url(
                task_id,
                artifact_name,
                use_proxy=not artifact_name.startswith("public/"),
            )
            super(ArtifactRecord, self).__init__(
                artifact_url, name, None, digest, algorithm, unpack=True
            )

    records = OrderedDict()
    downloaded = []

    if tooltool_manifest:
        manifest = open_manifest(tooltool_manifest)
        for record in manifest.file_records:
            url = "{}/{}/{}".format(tooltool_url, record.algorithm, record.digest)
            records[record.filename] = DownloadRecord(
                url,
                record.filename,
                record.size,
                record.digest,
                record.algorithm,
                unpack=record.unpack,
                version=record.version,
                visibility=record.visibility,
            )

    if from_build:
        if "MOZ_AUTOMATION" in os.environ:
            self.log(
                logging.ERROR,
                "artifact",
                {},
                "Do not use --from-build in automation; all dependencies "
                "should be determined in the decision task.",
            )
            return 1
        from taskgraph.optimize.strategies import IndexSearch
        from mozbuild.toolchains import toolchain_task_definitions

        tasks = toolchain_task_definitions()

        for b in from_build:
            user_value = b

            if not b.startswith("toolchain-"):
                b = "toolchain-{}".format(b)

            task = tasks.get(b)
            if not task:
                self.log(
                    logging.ERROR,
                    "artifact",
                    {"build": user_value},
                    "Could not find a toolchain build named `{build}`",
                )
                return 1

            # Ensure that toolchains installed by `mach bootstrap` have the
            # `local-toolchain` attribute set. Taskgraph ensures that these
            # are built on trunk projects, so the task will be available to
            # install here.
            if bootstrap and not task.attributes.get("local-toolchain"):
                self.log(
                    logging.ERROR,
                    "artifact",
                    {"build": user_value},
                    "Toolchain `{build}` is not annotated as used for local development.",
                )
                return 1

            artifact_name = task.attributes.get("toolchain-artifact")
            self.log(
                logging.DEBUG,
                "artifact",
                {
                    "name": artifact_name,
                    "index": task.optimization.get("index-search"),
                },
                "Searching for {name} in {index}",
            )
            task_id = IndexSearch().should_replace_task(
                task, {}, task.optimization.get("index-search", [])
            )

            if task_id in (True, False) or not artifact_name:
                self.log(
                    logging.ERROR,
                    "artifact",
                    {"build": user_value},
                    _COULD_NOT_FIND_ARTIFACTS_TEMPLATE,
                )
                # Get and print some helpful info for diagnosis.
                repo = mozversioncontrol.get_repository_object(self.topsrcdir)
                changed_files = set(repo.get_outgoing_files()) | set(
                    repo.get_changed_files()
                )
                if changed_files:
                    self.log(
                        logging.ERROR,
                        "artifact",
                        {},
                        "Hint: consider reverting your local changes "
                        "to the following files: %s" % sorted(changed_files),
                    )
                if "TASKCLUSTER_ROOT_URL" in os.environ:
                    self.log(
                        logging.ERROR,
                        "artifact",
                        {"build": user_value},
                        "Due to the environment variable TASKCLUSTER_ROOT_URL "
                        "being set, the artifacts were expected to be found "
                        "on {}. If this was unintended, unset "
                        "TASKCLUSTER_ROOT_URL and try again.".format(
                            os.environ["TASKCLUSTER_ROOT_URL"]
                        ),
                    )
                return 1

            self.log(
                logging.DEBUG,
                "artifact",
                {"name": artifact_name, "task_id": task_id},
                "Found {name} in {task_id}",
            )

            record = ArtifactRecord(task_id, artifact_name)
            records[record.filename] = record

    for record in six.itervalues(records):
        self.log(
            logging.INFO,
            "artifact",
            {"name": record.basename},
            "Setting up artifact {name}",
        )
        valid = False
        # sleeptime is 60 per retry.py, used by tooltool_wrapper.sh
        for attempt, _ in enumerate(redo.retrier(attempts=retry + 1, sleeptime=60)):
            try:
                record.fetch_with(cache)
            except (
                requests.exceptions.HTTPError,
                requests.exceptions.ChunkedEncodingError,
                requests.exceptions.ConnectionError,
            ) as e:

                if isinstance(e, requests.exceptions.HTTPError):
                    # The relengapi proxy likes to return error 400 bad
                    # request, which seems unlikely to be due to our
                    # (simple) GET being borked.
                    status = e.response.status_code
                    should_retry = status >= 500 or status == 400
                else:
                    should_retry = True

                if should_retry or attempt < retry:
                    level = logging.WARN
                else:
                    level = logging.ERROR
                self.log(level, "artifact", {}, str(e))
                if not should_retry:
                    break
                if attempt < retry:
                    self.log(logging.INFO, "artifact", {}, "Will retry in a moment...")
                continue
            try:
                valid = record.validate()
            except Exception:
                pass
            if not valid:
                os.unlink(record.filename)
                if attempt < retry:
                    self.log(
                        logging.INFO,
                        "artifact",
                        {},
                        "Corrupt download. Will retry in a moment...",
                    )
                continue

            downloaded.append(record)
            break

        if not valid:
            self.log(
                logging.ERROR,
                "artifact",
                {"name": record.basename},
                "Failed to download {name}",
            )
            return 1

    artifacts = {} if artifact_manifest else None

    for record in downloaded:
        local = os.path.join(os.getcwd(), record.basename)
        if os.path.exists(local):
            os.unlink(local)
        # unpack_file needs the file with its final name to work
        # (https://github.com/mozilla/build-tooltool/issues/38), so we
        # need to copy it, even though we remove it later. Use hard links
        # when possible.
        try:
            os.link(record.filename, local)
        except Exception:
            shutil.copy(record.filename, local)
        # Keep a sha256 of each downloaded file, for the chain-of-trust
        # validation.
        if artifact_manifest is not None:
            with open(local, "rb") as fh:
                h = hashlib.sha256()
                while True:
                    data = fh.read(1024 * 1024)
                    if not data:
                        break
                    h.update(data)
            artifacts[record.url] = {
                "sha256": h.hexdigest(),
            }
        if record.unpack and not no_unpack:
            unpack_file(local)
            os.unlink(local)

    if not downloaded:
        self.log(logging.ERROR, "artifact", {}, "Nothing to download")

    if artifacts:
        ensureParentDir(artifact_manifest)
        with open(artifact_manifest, "w") as fh:
            json.dump(artifacts, fh, indent=4, sort_keys=True)

    if "MOZ_AUTOMATION" in os.environ:
        end = time.time()

        perfherder_data = {
            "framework": {"name": "build_metrics"},
            "suites": [
                {
                    "name": "mach_artifact_toolchain",
                    "value": end - start,
                    "lowerIsBetter": True,
                    "shouldAlert": False,
                    "subtests": [],
                }
            ],
        }
        self.log(
            logging.INFO,
            "perfherder",
            {"data": json.dumps(perfherder_data)},
            "PERFHERDER_DATA: {data}",
        )

    return 0
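The chunked hashing in the chain-of-trust loop above can be read as a standalone helper. A sketch mirroring that logic; the function name is hypothetical:

import hashlib

def sha256_of(path, chunk_size=1024 * 1024):
    # Hash the file in 1 MiB chunks, as the loop above does, so large
    # artifacts never need to be held in memory all at once.
    h = hashlib.sha256()
    with open(path, "rb") as fh:
        for block in iter(lambda: fh.read(chunk_size), b""):
            h.update(block)
    return h.hexdigest()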
def artifact_toolchain(self, verbose=False, cache_dir=None,
                       skip_cache=False, from_build=(),
                       tooltool_manifest=None, authentication_file=None,
                       tooltool_url=None, no_unpack=False, retry=None,
                       artifact_manifest=None, files=()):
    '''Download, cache and install pre-built toolchains.
    '''
    from mozbuild.artifacts import ArtifactCache
    from mozbuild.action.tooltool import (
        FileRecord,
        open_manifest,
        unpack_file,
    )
    from requests.adapters import HTTPAdapter
    import redo
    import requests

    from taskgraph.util.taskcluster import (
        get_artifact_url,
    )

    self._set_log_level(verbose)
    # Normally, we'd use self.log_manager.enable_unstructured(),
    # but that enables all logging, while we only really want tooltool's
    # and it also makes structured log output twice.
    # So we manually do what it does, and limit that to the tooltool
    # logger.
    if self.log_manager.terminal_handler:
        logging.getLogger('mozbuild.action.tooltool').addHandler(
            self.log_manager.terminal_handler)
        logging.getLogger('redo').addHandler(
            self.log_manager.terminal_handler)
        self.log_manager.terminal_handler.addFilter(
            self.log_manager.structured_filter)
    if not cache_dir:
        cache_dir = os.path.join(self._mach_context.state_dir, 'toolchains')

    tooltool_url = (tooltool_url or
                    'https://tooltool.mozilla-releng.net').rstrip('/')

    cache = ArtifactCache(cache_dir=cache_dir, log=self.log,
                          skip_cache=skip_cache)

    if authentication_file:
        with open(authentication_file, 'rb') as f:
            token = f.read().strip()

        class TooltoolAuthenticator(HTTPAdapter):
            def send(self, request, *args, **kwargs):
                request.headers['Authorization'] = \
                    'Bearer {}'.format(token)
                return super(TooltoolAuthenticator, self).send(
                    request, *args, **kwargs)

        cache._download_manager.session.mount(
            tooltool_url, TooltoolAuthenticator())

    class DownloadRecord(FileRecord):
        def __init__(self, url, *args, **kwargs):
            super(DownloadRecord, self).__init__(*args, **kwargs)
            self.url = url
            self.basename = self.filename

        def fetch_with(self, cache):
            self.filename = cache.fetch(self.url)
            return self.filename

        def validate(self):
            if self.size is None and self.digest is None:
                return True
            return super(DownloadRecord, self).validate()

    class ArtifactRecord(DownloadRecord):
        def __init__(self, task_id, artifact_name):
            for _ in redo.retrier(attempts=retry + 1, sleeptime=60):
                cot = cache._download_manager.session.get(
                    get_artifact_url(task_id, 'public/chain-of-trust.json'))
                if cot.status_code >= 500:
                    continue
                cot.raise_for_status()
                break
            else:
                cot.raise_for_status()

            digest = algorithm = None
            data = json.loads(cot.content)
            for algorithm, digest in (data.get('artifacts', {})
                                          .get(artifact_name, {}).items()):
                pass
            name = os.path.basename(artifact_name)
            artifact_url = get_artifact_url(
                task_id, artifact_name,
                use_proxy=not artifact_name.startswith('public/'))
            super(ArtifactRecord, self).__init__(
                artifact_url, name, None, digest, algorithm, unpack=True)

    records = OrderedDict()
    downloaded = []

    if tooltool_manifest:
        manifest = open_manifest(tooltool_manifest)
        for record in manifest.file_records:
            url = '{}/{}/{}'.format(tooltool_url, record.algorithm,
                                    record.digest)
            records[record.filename] = DownloadRecord(
                url, record.filename, record.size, record.digest,
                record.algorithm, unpack=record.unpack,
                version=record.version, visibility=record.visibility)

    if from_build:
        if 'MOZ_AUTOMATION' in os.environ:
            self.log(
                logging.ERROR, 'artifact', {},
                'Do not use --from-build in automation; all dependencies '
                'should be determined in the decision task.')
            return 1
        from taskgraph.optimize.strategies import IndexSearch
        from taskgraph.parameters import Parameters
        from taskgraph.generator import load_tasks_for_kind
        params = Parameters(
            level=os.environ.get('MOZ_SCM_LEVEL', '3'),
            strict=False,
        )

        root_dir = mozpath.join(self.topsrcdir, 'taskcluster/ci')
        toolchains = load_tasks_for_kind(params, 'toolchain',
                                         root_dir=root_dir)

        aliases = {}
        for t in toolchains.values():
            alias = t.attributes.get('toolchain-alias')
            if alias:
                aliases['toolchain-{}'.format(alias)] = \
                    t.task['metadata']['name']

        for b in from_build:
            user_value = b

            if not b.startswith('toolchain-'):
                b = 'toolchain-{}'.format(b)

            task = toolchains.get(aliases.get(b, b))
            if not task:
                self.log(
                    logging.ERROR, 'artifact', {'build': user_value},
                    'Could not find a toolchain build named `{build}`')
                return 1

            task_id = IndexSearch().should_replace_task(
                task, {}, task.optimization.get('index-search', []))
            artifact_name = task.attributes.get('toolchain-artifact')
            if task_id in (True, False) or not artifact_name:
                self.log(
                    logging.ERROR, 'artifact', {'build': user_value},
                    'Could not find artifacts for a toolchain build '
                    'named `{build}`. Local commits and other changes '
                    'in your checkout may cause this error. Try '
                    'updating to a fresh checkout of mozilla-central '
                    'to use artifact builds.')
                return 1

            record = ArtifactRecord(task_id, artifact_name)
            records[record.filename] = record

    # Handle the list of files of the form path@task-id on the command
    # line. Each of those gives a path to an artifact to download.
    for f in files:
        if '@' not in f:
            self.log(logging.ERROR, 'artifact', {},
                     'Expected a list of files of the form path@task-id')
            return 1
        name, task_id = f.rsplit('@', 1)
        record = ArtifactRecord(task_id, name)
        records[record.filename] = record

    for record in records.itervalues():
        self.log(logging.INFO, 'artifact', {'name': record.basename},
                 'Setting up artifact {name}')
        valid = False
        # sleeptime is 60 per retry.py, used by tooltool_wrapper.sh
        for attempt, _ in enumerate(
                redo.retrier(attempts=retry + 1, sleeptime=60)):
            try:
                record.fetch_with(cache)
            except (requests.exceptions.HTTPError,
                    requests.exceptions.ChunkedEncodingError,
                    requests.exceptions.ConnectionError) as e:

                if isinstance(e, requests.exceptions.HTTPError):
                    # The relengapi proxy likes to return error 400 bad
                    # request, which seems unlikely to be due to our
                    # (simple) GET being borked.
                    status = e.response.status_code
                    should_retry = status >= 500 or status == 400
                else:
                    should_retry = True

                if should_retry or attempt < retry:
                    level = logging.WARN
                else:
                    level = logging.ERROR
                # e.message is not always a string, so convert it first.
                self.log(level, 'artifact', {}, str(e.message))
                if not should_retry:
                    break
                if attempt < retry:
                    self.log(logging.INFO, 'artifact', {},
                             'Will retry in a moment...')
                continue
            try:
                valid = record.validate()
            except Exception:
                pass
            if not valid:
                os.unlink(record.filename)
                if attempt < retry:
                    self.log(
                        logging.INFO, 'artifact', {},
                        'Corrupt download. Will retry in a moment...')
                continue

            downloaded.append(record)
            break

        if not valid:
            self.log(logging.ERROR, 'artifact', {'name': record.basename},
                     'Failed to download {name}')
            return 1

    artifacts = {} if artifact_manifest else None

    for record in downloaded:
        local = os.path.join(os.getcwd(), record.basename)
        if os.path.exists(local):
            os.unlink(local)
        # unpack_file needs the file with its final name to work
        # (https://github.com/mozilla/build-tooltool/issues/38), so we
        # need to copy it, even though we remove it later. Use hard links
        # when possible.
        try:
            os.link(record.filename, local)
        except Exception:
            shutil.copy(record.filename, local)
        # Keep a sha256 of each downloaded file, for the chain-of-trust
        # validation.
        if artifact_manifest is not None:
            with open(local) as fh:
                h = hashlib.sha256()
                while True:
                    data = fh.read(1024 * 1024)
                    if not data:
                        break
                    h.update(data)
            artifacts[record.url] = {
                'sha256': h.hexdigest(),
            }
        if record.unpack and not no_unpack:
            unpack_file(local)
            os.unlink(local)

    if not downloaded:
        self.log(logging.ERROR, 'artifact', {}, 'Nothing to download')
        if files:
            return 1

    if artifacts:
        ensureParentDir(artifact_manifest)
        with open(artifact_manifest, 'w') as fh:
            json.dump(artifacts, fh, indent=4, sort_keys=True)

    return 0
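The `for algorithm, digest in ...: pass` loop inside ArtifactRecord is a terse way to pull out the single algorithm/digest pair that chain-of-trust.json records per artifact. A sketch of that idiom with a placeholder payload; the artifact name and digest are fake:

# Placeholder chain-of-trust payload; the digest value is fabricated.
data = {
    "artifacts": {
        "public/build/clang.tar.zst": {"sha256": "deadbeef" * 8},
    }
}

digest = algorithm = None
# Iterate the (normally single-entry) mapping and keep the last pair,
# exactly as the ArtifactRecord constructor above does.
for algorithm, digest in data.get("artifacts", {}).get(
        "public/build/clang.tar.zst", {}).items():
    pass

assert algorithm == "sha256"
assert digest == "deadbeef" * 8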