async def upload_artifacts(context):
    """Upload every file found under ``artifact_dir``, keeping relative paths.

    The directory layout inside ``artifact_dir`` is preserved as-is, so files
    destined for ``public/...`` must already live under ``artifact_dir/public``.

    Args:
        context (scriptworker.context.Context): the scriptworker context.

    Raises:
        Exception: any exceptions the upload tasks raise.
    """
    artifact_dir = context.config['artifact_dir']
    upload_configs = []
    for relative_path in filepaths_in_dir(artifact_dir):
        upload_configs.append({
            'path': os.path.join(artifact_dir, relative_path),
            'target_path': relative_path,
            'content_type': None,
        })
    # Schedule every upload concurrently, then surface any failures together.
    futures = [
        asyncio.ensure_future(
            retry_create_artifact(
                context,
                upload_config['path'],
                target_path=upload_config['target_path'],
                content_type=upload_config['content_type'],
            )
        )
        for upload_config in upload_configs
    ]
    await raise_future_exceptions(futures)
def test_filepaths_in_dir(tmpdir):
    """filepaths_in_dir returns all nested file paths relative to the root."""
    expected = sorted([
        "asdfasdf/lwekrjweoi/lsldkfjs",
        "lkdsjf/werew/sdlkfds",
        "lsdkjf/sdlkfds",
        "lkdlkf/lsldkfjs",
    ])
    # Create each file (plus its parent directories) inside the tmpdir.
    for relpath in expected:
        os.makedirs(os.path.join(tmpdir, os.path.dirname(relpath)))
        touch(os.path.join(tmpdir, relpath))
    assert sorted(utils.filepaths_in_dir(tmpdir)) == expected
def test_filepaths_in_dir(tmpdir):
    """filepaths_in_dir discovers every file created under the tmpdir tree."""
    relative_paths = [
        "asdfasdf/lwekrjweoi/lsldkfjs",
        "lkdsjf/werew/sdlkfds",
        "lsdkjf/sdlkfds",
        "lkdlkf/lsldkfjs",
    ]
    relative_paths.sort()
    for relative_path in relative_paths:
        # Ensure the parent directory chain exists before touching the file.
        os.makedirs(os.path.join(tmpdir, os.path.dirname(relative_path)))
        touch(os.path.join(tmpdir, relative_path))
    found = sorted(utils.filepaths_in_dir(tmpdir))
    assert found == relative_paths
async def invoke(self, context):
    """Claim and process Taskcluster work.

    Polls for work once; if none is available, sleeps one poll interval.
    Otherwise runs each claimed task, uploads its artifacts, and reports
    its status.

    Args:
        context (scriptworker.context.Context): context of worker

    Returns:
        status code of the (last) task run, or None if no task was claimed
        or the worker was cancelled.
    """
    try:
        # Note: claim_work(...) might not be safely interruptible! See
        # https://bugzilla.mozilla.org/show_bug.cgi?id=1524069
        tasks = await self._run_cancellable(claim_work(context))
        if not tasks or not tasks.get("tasks", []):
            await self._run_cancellable(
                asyncio.sleep(context.config["poll_interval"]))
            return None

        # Assume only a single task, but should more than one fall through,
        # run them sequentially. A side effect is our return status will
        # be the status of the final task run.
        status = None
        for task_defn in tasks.get("tasks", []):
            prepare_to_run_task(context, task_defn)
            # Keep reclaiming the task in the background so the claim
            # doesn't expire while the task runs.
            reclaim_fut = context.event_loop.create_task(
                reclaim_task(context, context.task))
            # Bug fix: cancel the reclaim future and clean up per-task state
            # even if upload/completion (or an unexpected exception from
            # do_run_task) raises; previously these were skipped on error,
            # leaking the background reclaim task.
            try:
                try:
                    status = await do_run_task(context, self._run_cancellable,
                                               self._to_cancellable_process)
                    artifacts_paths = filepaths_in_dir(
                        context.config["artifact_dir"])
                except WorkerShutdownDuringTask:
                    # Worker is shutting down mid-task: upload only the log
                    # files that actually exist, with worker-shutdown status.
                    shutdown_artifact_paths = [
                        os.path.join("public", "logs", log_file)
                        for log_file in ["chain_of_trust.log", "live_backing.log"]
                    ]
                    artifacts_paths = [
                        path for path in shutdown_artifact_paths
                        if os.path.isfile(
                            os.path.join(context.config["artifact_dir"], path))
                    ]
                    status = STATUSES["worker-shutdown"]
                # Upload failures can only worsen the final status.
                status = worst_level(status,
                                     await do_upload(context, artifacts_paths))
                await complete_task(context, status)
            finally:
                reclaim_fut.cancel()
                cleanup(context)
        return status
    except asyncio.CancelledError:
        # Cancelled while waiting for work; treat as "nothing done".
        return None
def get_cot_artifacts(context):
    """Generate the artifact relative paths and shas for the chain of trust

    Args:
        context (scriptworker.context.Context): the scriptworker context.

    Returns:
        dict: a dictionary of {"path/to/artifact": {"hash_alg": "..."}, ...}
    """
    artifact_dir = context.config['artifact_dir']
    hash_alg = context.config['chain_of_trust_hash_algorithm']
    # Sorted iteration keeps the mapping's insertion order deterministic.
    return {
        relpath: {
            hash_alg: get_hash(
                os.path.join(artifact_dir, relpath), hash_alg=hash_alg)
        }
        for relpath in sorted(filepaths_in_dir(artifact_dir))
    }
def get_cot_artifacts(context):
    """Generate the artifact relative paths and shas for the chain of trust.

    Args:
        context (scriptworker.context.Context): the scriptworker context.

    Returns:
        dict: a dictionary of {"path/to/artifact": {"hash_alg": "..."}, ...}
    """
    results = {}
    base_dir = context.config['artifact_dir']
    algorithm = context.config['chain_of_trust_hash_algorithm']
    # Walk the artifact tree in sorted order so output is deterministic.
    for relative_path in sorted(filepaths_in_dir(base_dir)):
        digest = get_hash(
            os.path.join(base_dir, relative_path), hash_alg=algorithm)
        results[relative_path] = {algorithm: digest}
    return results
async def invoke(self, context):
    """Claims and processes Taskcluster work.

    Args:
        context (scriptworker.context.Context): context of worker

    Returns:
        status code of build
    """
    try:
        # Note: claim_work(...) might not be safely interruptible! See
        # https://bugzilla.mozilla.org/show_bug.cgi?id=1524069
        tasks = await self._run_cancellable(claim_work(context))
        if not tasks or not tasks.get('tasks', []):
            # Nothing claimed: back off for one poll interval, then return.
            await self._run_cancellable(asyncio.sleep(context.config['poll_interval']))
            return None

        # Assume only a single task, but should more than one fall through,
        # run them sequentially. A side effect is our return status will
        # be the status of the final task run.
        status = None
        for task_defn in tasks.get('tasks', []):
            prepare_to_run_task(context, task_defn)
            # Reclaim the task in the background so the claim doesn't expire
            # while the task is running.
            reclaim_fut = context.event_loop.create_task(reclaim_task(context, context.task))
            try:
                status = await do_run_task(context, self._run_cancellable, self._to_cancellable_process)
                artifacts_paths = filepaths_in_dir(context.config['artifact_dir'])
            except WorkerShutdownDuringTask:
                # Worker shut down mid-task: only upload the log files that
                # actually exist, and report a worker-shutdown status.
                shutdown_artifact_paths = [os.path.join('public', 'logs', log_file)
                                           for log_file in ['chain_of_trust.log', 'live_backing.log']]
                artifacts_paths = [path for path in shutdown_artifact_paths
                                   if os.path.isfile(os.path.join(context.config['artifact_dir'], path))]
                status = STATUSES['worker-shutdown']
            # Upload failures can worsen the status we report for the task.
            status = worst_level(status, await do_upload(context, artifacts_paths))
            await complete_task(context, status)
            reclaim_fut.cancel()
            cleanup(context)
        return status
    except asyncio.CancelledError:
        # Cancelled while waiting for work; treat as "nothing done".
        return None
async def upload_artifacts(context):
    """Compress and upload the files in ``artifact_dir``, preserving relative paths.

    Compression only occurs with files known to be supported.

    This function expects the directory structure in ``artifact_dir`` to
    remain the same.  So if we want the files in ``public/...``, create an
    ``artifact_dir/public`` and put the files in there.

    Args:
        context (scriptworker.context.Context): the scriptworker context.

    Raises:
        Exception: any exceptions the tasks raise.
    """
    artifact_dir = context.config['artifact_dir']
    futures = []
    for relative_path in filepaths_in_dir(artifact_dir):
        absolute_path = os.path.join(artifact_dir, relative_path)
        # Compress in place when the file type supports it; the returned
        # metadata is forwarded to the upload call.
        content_type, content_encoding = compress_artifact_if_supported(absolute_path)
        futures.append(
            asyncio.ensure_future(
                retry_create_artifact(
                    context,
                    absolute_path,
                    target_path=relative_path,
                    content_type=content_type,
                    content_encoding=content_encoding,
                )
            )
        )
    # Run all uploads concurrently and raise any collected failures.
    await raise_future_exceptions(futures)