Example #1
async def create_artifact(context,
                          path,
                          target_path,
                          content_type,
                          content_encoding,
                          storage_type='s3',
                          expires=None):
    """Create an artifact and upload it.

    This should support s3 and azure out of the box; we'll need some tweaking
    if we want to support redirect/error artifacts.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        path (str): the path of the file to upload.
        target_path (str): the target path (artifact name) to upload the
            artifact to on the Taskcluster queue.
        content_type (str): Content type (MIME type) of the artifact. Values can be found via
            scriptworker.artifacts.guess_content_type_and_encoding()
        content_encoding (str): Encoding (per the ``mimetypes`` library) of the artifact. None means no encoding.
            Values can be found via scriptworker.artifacts.guess_content_type_and_encoding()
        storage_type (str, optional): the taskcluster storage type to use.
            Defaults to 's3'
        expires (str, optional): datestring of when the artifact expires.
            Defaults to None.

    Raises:
        ScriptWorkerRetryException: on failure.

    """
    payload = {
        "storageType": storage_type,
        "expires": expires or get_expiration_arrow(context).isoformat(),
        "contentType": content_type,
    }
    args = [
        get_task_id(context.claim_task),
        get_run_id(context.claim_task), target_path, payload
    ]

    tc_response = await context.temp_queue.createArtifact(*args)
    skip_auto_headers = [aiohttp.hdrs.CONTENT_TYPE]
    loggable_url = get_loggable_url(tc_response['putUrl'])
    log.info("uploading {path} to {url}...".format(path=path,
                                                   url=loggable_url))
    with open(path, "rb") as fh:
        async with async_timeout.timeout(
                context.config['artifact_upload_timeout']):
            async with context.session.put(tc_response['putUrl'],
                                           data=fh,
                                           headers=_craft_artifact_put_headers(
                                               content_type, content_encoding),
                                           skip_auto_headers=skip_auto_headers,
                                           compress=False) as resp:
                log.info("create_artifact {}: {}".format(path, resp.status))
                response_text = await resp.text()
                log.info(response_text)
                if resp.status not in (200, 204):
                    raise ScriptWorkerRetryException(
                        "Bad status {}".format(resp.status))

Example #2
async def create_artifact(context, path, target_path, content_type, content_encoding, storage_type='s3', expires=None):
    """Create an artifact and upload it.

    This should support s3 and azure out of the box; we'll need some tweaking
    if we want to support redirect/error artifacts.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        path (str): the path of the file to upload.
        target_path (str): the target path (artifact name) to upload the
            artifact to on the Taskcluster queue.
        content_type (str): Content type (MIME type) of the artifact. Values can be found via
            scriptworker.artifacts.guess_content_type_and_encoding()
        content_encoding (str): Encoding (per the ``mimetypes`` library) of the artifact. None means no encoding.
            Values can be found via scriptworker.artifacts.guess_content_type_and_encoding()
        storage_type (str, optional): the taskcluster storage type to use.
            Defaults to 's3'
        expires (str, optional): datestring of when the artifact expires.
            Defaults to None.

    Raises:
        ScriptWorkerRetryException: on failure.

    """
    payload = {
        "storageType": storage_type,
        "expires": expires or get_expiration_arrow(context).isoformat(),
        "contentType": content_type,
    }
    args = [get_task_id(context.claim_task), get_run_id(context.claim_task),
            target_path, payload]

    tc_response = await context.temp_queue.createArtifact(*args)
    skip_auto_headers = [aiohttp.hdrs.CONTENT_TYPE]
    loggable_url = get_loggable_url(tc_response['putUrl'])
    log.info("uploading {path} to {url}...".format(path=path, url=loggable_url))
    with open(path, "rb") as fh:
        async with async_timeout.timeout(context.config['artifact_upload_timeout']):
            async with context.session.put(
                tc_response['putUrl'], data=fh, headers=_craft_artifact_put_headers(content_type, content_encoding),
                skip_auto_headers=skip_auto_headers, compress=False
            ) as resp:
                log.info("create_artifact {}: {}".format(path, resp.status))
                response_text = await resp.text()
                log.info(response_text)
                if resp.status not in (200, 204):
                    raise ScriptWorkerRetryException(
                        "Bad status {}".format(resp.status),
                    )
Example #3
    def __init__(self, context, name, task_id=None):
        """Initialize ChainOfTrust.

        Args:
            context (scriptworker.context.Context): the scriptworker context
            name (str): the name of the task (e.g., signing)
            task_id (str, optional): the task_id of the task.  If None, use
                ``get_task_id(context.claim_task)``.  Defaults to None.
        """
        self.name = name
        self.task_type = guess_task_type(name)
        self.context = context
        self.task_id = task_id or get_task_id(context.claim_task)
        self.task = context.task
        self.worker_impl = guess_worker_impl(self)  # this should be scriptworker
        self.decision_task_id = get_decision_task_id(self.task)
        self.links = []
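
A brief construction sketch for the class above; the import path and the 'signing' task name are assumptions, and context is a claimed scriptworker context.

# Hypothetical construction; the import path and task name are assumptions.
from scriptworker.cot.verify import ChainOfTrust


def build_chain(context):
    # task_id defaults to get_task_id(context.claim_task) when omitted
    chain = ChainOfTrust(context, 'signing')
    return chain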
Example #4
    async def invoke(self, context):
        """Claims and processes Taskcluster work.

        Args:
            context (scriptworker.context.Context): context of worker

        Returns:
            int or None: the status code of the final task run, or None if no
                task was claimed or the loop was cancelled.

        """
        try:
            # Note: claim_work(...) might not be safely interruptible! See
            # https://bugzilla.mozilla.org/show_bug.cgi?id=1524069
            tasks = await self._run_cancellable(claim_work(context))
            if not tasks or not tasks.get('tasks', []):
                await self._run_cancellable(
                    asyncio.sleep(context.config['poll_interval']))
                return None

            # Assume only a single task, but should more than one fall through,
            # run them sequentially.  A side effect is our return status will
            # be the status of the final task run.
            status = None
            for task_defn in tasks.get('tasks', []):
                prepare_to_run_task(context, task_defn)
                try:
                    taskId = get_task_id(task_defn)
                    runId = get_run_id(task_defn)
                except KeyError:
                    taskId = None
                    runId = None
                log_worker_metric(context,
                                  "taskStart",
                                  taskId=taskId,
                                  runId=runId)
                reclaim_fut = context.event_loop.create_task(
                    reclaim_task(context, context.task))
                try:
                    status = await do_run_task(context, self._run_cancellable,
                                               self._to_cancellable_process)
                    artifacts_paths = filepaths_in_dir(
                        context.config['artifact_dir'])
                except WorkerShutdownDuringTask:
                    shutdown_artifact_paths = [
                        os.path.join('public', 'logs', log_file) for log_file
                        in ['chain_of_trust.log', 'live_backing.log']
                    ]
                    artifacts_paths = [
                        path for path in shutdown_artifact_paths
                        if os.path.isfile(
                            os.path.join(context.config['artifact_dir'], path))
                    ]
                    status = STATUSES['worker-shutdown']
                status = worst_level(
                    status, await do_upload(context, artifacts_paths))
                await complete_task(context, status)
                log_worker_metric(context,
                                  "taskFinish",
                                  taskId=taskId,
                                  runId=runId)
                reclaim_fut.cancel()
                cleanup(context)

            return status

        except asyncio.CancelledError:
            return None
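
A rough sketch of a polling loop that could drive invoke(); the enclosing worker object and the driver function are hypothetical, only invoke(context) and its return value come from the example above.

import logging

log = logging.getLogger(__name__)


async def run_tasks_forever(worker, context):
    # Hypothetical driver; only invoke(context) is taken from the example above.
    while True:
        # invoke() returns the status of the last task run, or None when no
        # task was claimed (it already sleeps for poll_interval in that case).
        status = await worker.invoke(context)
        if status is not None:
            log.info("task run finished with status %s", status)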