Code example #1
def get_destination_for_partner_repack_path(context, manifest, full_path, locale):
    """Function to process the final destination path, relative to the root of
    the S3 bucket. Depending on whether it's a private or public destination, it
    performs several string manipulations.

    Input: 'releng/partner/ghost/ghost-var/v1/linux-i686/ro/target.tar.bz2'
    Possible output(s):
        -> ghost/59.0b20-2/ghost-variant/v1/linux-i686/en-US/firefox-59.0b20.tar.bz2
        -> pub/firefox/candidates/59.0b20-candidates/build2/partner-repacks/ghost/ghost-variant/v1/linux-i686/en-US/firefox-59.0b20.tar.bz2
    """
    # make sure we're calling this function from private-partner context
    if not is_partner_action(context.action):
        raise ScriptWorkerRetryException("Outside of private-partner context!")

    # pretty name the `target` part to the actual filename
    pretty_full_path = os.path.join(locale, manifest["mapping"][locale][os.path.basename(full_path)])

    build_number = context.task["payload"]["build_number"]
    version = context.task["payload"]["version"]

    if is_partner_private_task(context):
        sanity_check_partner_path(locale, {"version": version, "build_number": build_number}, PARTNER_REPACK_PRIVATE_REGEXES)
        return pretty_full_path
    elif is_partner_public_task(context):
        sanity_check_partner_path(locale, {"version": version, "build_number": build_number}, PARTNER_REPACK_PUBLIC_REGEXES)
        prefix = PARTNER_REPACK_PUBLIC_PREFIX_TMPL.format(version=version, build_number=build_number)
        return os.path.join(prefix, pretty_full_path)
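
The constant PARTNER_REPACK_PUBLIC_PREFIX_TMPL is not shown in this snippet. Judging from the second sample output in the docstring, it plausibly expands like the sketch below; treat the exact template string as an assumption, not the verified beetmoverscript constant.

# Assumed shape of the prefix template, reconstructed from the docstring's
# sample output; not copied from beetmoverscript itself.
PARTNER_REPACK_PUBLIC_PREFIX_TMPL = (
    "pub/firefox/candidates/{version}-candidates/"
    "build{build_number}/partner-repacks"
)

prefix = PARTNER_REPACK_PUBLIC_PREFIX_TMPL.format(version="59.0b20", build_number=2)
# -> "pub/firefox/candidates/59.0b20-candidates/build2/partner-repacks"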
Code example #2
async def claim_task(context, taskId, runId):
    """Attempt to claim a task that we found in the Azure queue.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        taskId (str): the taskcluster taskId to claim
        runId (int): the taskcluster runId to claim

    Returns:
        dict: claimTask definition, if successful.  If unsuccessful, return None.
    """
    payload = {
        'workerGroup': context.config['worker_group'],
        'workerId': context.config['worker_id'],
    }
    try:
        result = await context.queue.claimTask(taskId, runId, payload)
        return result
    except taskcluster.exceptions.TaskclusterFailure as exc:
        log.debug("Got %s" % exc)
        if hasattr(exc, 'status_code') and exc.status_code == 409:
            # 409 means we found a task that's claimed by another worker
            # or cancelled.  Let's return None and delete it from Azure
            # so we don't get a backlog of bogus tasks in the queue.
            log.debug("Got %s" % exc)
            return None
        else:
            raise ScriptWorkerRetryException(str(exc))
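
Since claim_task raises ScriptWorkerRetryException on transient failures, the caller is expected to retry it. Below is a minimal illustrative retry loop, assuming the claim_task and context from the example above; scriptworker's own retry helper may differ.

import asyncio

async def claim_with_retries(context, task_id, run_id, attempts=5):
    # Retry only on the retryable exception; let anything else propagate.
    for attempt in range(1, attempts + 1):
        try:
            return await claim_task(context, task_id, run_id)
        except ScriptWorkerRetryException:
            if attempt == attempts:
                raise
            await asyncio.sleep(2 ** attempt)  # simple exponential backoff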
Code example #3
File: utils.py  Project: garbas/scriptworker
async def request(context,
                  url,
                  timeout=60,
                  method='get',
                  good=(200, ),
                  retry=tuple(range(500, 512)),
                  return_type='text',
                  **kwargs):
    """Async aiohttp request wrapper
    """
    session = context.session
    with aiohttp.Timeout(timeout):
        log.debug("{} {}".format(method.upper(), url))
        async with session.request(method, url, **kwargs) as resp:
            log.debug("Status {}".format(resp.status))
            message = "Bad status {}".format(resp.status)
            if resp.status in retry:
                raise ScriptWorkerRetryException(message)
            if resp.status not in good:
                raise ScriptWorkerException(message)
            if return_type == 'text':
                return await resp.text()
            elif return_type == 'json':
                return await resp.json()
            else:
                return resp
Code example #4
async def create_artifact(context,
                          path,
                          target_path,
                          content_type,
                          content_encoding,
                          storage_type='s3',
                          expires=None):
    """Create an artifact and upload it.

    This should support s3 and azure out of the box; we'll need some tweaking
    if we want to support redirect/error artifacts.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        path (str): the path of the file to upload.
        target_path (str): the artifact name/path to create in the task.
        content_type (str): Content type (MIME type) of the artifact. Values can be found via
            scriptworker.artifacts.guess_content_type_and_encoding()
        content_encoding (str): Encoding (per mimetypes' library) of the artifact. None is for no encoding. Values can
            be found via scriptworker.artifacts.guess_content_type_and_encoding()
        storage_type (str, optional): the taskcluster storage type to use.
            Defaults to 's3'
        expires (str, optional): datestring of when the artifact expires.
            Defaults to None.

    Raises:
        ScriptWorkerRetryException: on failure.

    """
    payload = {
        "storageType": storage_type,
        "expires": expires or get_expiration_arrow(context).isoformat(),
        "contentType": content_type,
    }
    args = [
        get_task_id(context.claim_task),
        get_run_id(context.claim_task), target_path, payload
    ]

    tc_response = await context.temp_queue.createArtifact(*args)
    skip_auto_headers = [aiohttp.hdrs.CONTENT_TYPE]
    loggable_url = get_loggable_url(tc_response['putUrl'])
    log.info("uploading {path} to {url}...".format(path=path,
                                                   url=loggable_url))
    with open(path, "rb") as fh:
        async with async_timeout.timeout(
                context.config['artifact_upload_timeout']):
            async with context.session.put(tc_response['putUrl'],
                                           data=fh,
                                           headers=_craft_artifact_put_headers(
                                               content_type, content_encoding),
                                           skip_auto_headers=skip_auto_headers,
                                           compress=False) as resp:
                log.info("create_artifact {}: {}".format(path, resp.status))
                response_text = await resp.text()
                log.info(response_text)
                if resp.status not in (200, 204):
                    raise ScriptWorkerRetryException(
                        "Bad status {}".format(resp.status), )
Code example #5
async def put(context, url, headers, abs_filename, session=None):
    """Upload a file via HTTP PUT, raising ScriptWorkerRetryException on a bad status."""
    session = session or context.session
    with open(abs_filename, "rb") as fh:
        async with session.put(url, data=fh, headers=headers, compress=False) as resp:
            log.info("put {}: {}".format(abs_filename, resp.status))
            response_text = await resp.text()
            if response_text:
                log.info(response_text)
            if resp.status not in (200, 204):
                raise ScriptWorkerRetryException("Bad status {}".format(resp.status))
    return resp
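
A hypothetical caller for put(), retrying on ScriptWorkerRetryException; the header value and attempt count below are illustrative, not from scriptworker:

async def upload_with_retries(context, url, abs_filename, attempts=3):
    headers = {"Content-Type": "application/octet-stream"}  # illustrative
    for attempt in range(1, attempts + 1):
        try:
            return await put(context, url, headers, abs_filename)
        except ScriptWorkerRetryException:
            if attempt == attempts:
                raise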
Code example #6
async def create_artifact(context,
                          path,
                          target_path,
                          storage_type='s3',
                          expires=None,
                          content_type=None):
    """Create an artifact and upload it.

    This should support s3 and azure out of the box; we'll need some tweaking
    if we want to support redirect/error artifacts.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        path (str): the path of the file to upload.
        target_path (str): the artifact name/path to create in the task.
        storage_type (str, optional): the taskcluster storage type to use.
            Defaults to 's3'
        expires (str, optional): datestring of when the artifact expires.
            Defaults to None.
        content_type (str, optional): Specify the content type of the artifact.
            If None, use guess_content_type().  Defaults to None.

    Raises:
        ScriptWorkerRetryException: on failure.
    """
    payload = {
        "storageType": storage_type,
        "expires": expires or get_expiration_arrow(context).isoformat(),
        "contentType": content_type or guess_content_type(path),
    }
    args = [
        context.claim_task['status']['taskId'], context.claim_task['runId'],
        target_path, payload
    ]
    tc_response = await context.temp_queue.createArtifact(*args)
    headers = {
        aiohttp.hdrs.CONTENT_TYPE: tc_response['contentType'],
    }
    skip_auto_headers = [aiohttp.hdrs.CONTENT_TYPE]
    log.info("uploading {path} to {url}...".format(path=path,
                                                   url=tc_response['putUrl']))
    with open(path, "rb") as fh:
        with aiohttp.Timeout(context.config['artifact_upload_timeout']):
            async with context.session.put(tc_response['putUrl'],
                                           data=fh,
                                           headers=headers,
                                           skip_auto_headers=skip_auto_headers,
                                           compress=False) as resp:
                log.info(resp.status)
                response_text = await resp.text()
                log.info(response_text)
                if resp.status not in (200, 204):
                    raise ScriptWorkerRetryException(
                        "Bad status {}".format(resp.status), )
Code example #7
File: utils.py  Project: Callek/scriptworker
async def request(context,
                  url,
                  timeout=60,
                  method='get',
                  good=(200, ),
                  retry=tuple(range(500, 512)),
                  return_type='text',
                  **kwargs):
    """Async aiohttp request wrapper.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        url (str): the url to request
        timeout (int, optional): timeout after this many seconds. Default is 60.
        method (str, optional): The request method to use.  Default is 'get'.
        good (list, optional): the set of good status codes.  Default is (200, )
        retry (list, optional): the set of status codes that result in a retry.
            Default is tuple(range(500, 512)).
        return_type (str, optional): The type of value to return.  Takes
            'json' or 'text'; other values will return the response object.
            Default is text.
        **kwargs: the kwargs to send to the aiohttp request function.

    Returns:
        object: the response text() if return_type is 'text'; the response
            json() if return_type is 'json'; the aiohttp request response
            object otherwise.

    Raises:
        ScriptWorkerRetryException: if the status code is in the retry list.
        ScriptWorkerException: if the status code is not in the retry list or
            good list.

    """
    session = context.session
    loggable_url = get_loggable_url(url)
    async with async_timeout.timeout(timeout):
        log.debug("{} {}".format(method.upper(), loggable_url))
        async with session.request(method, url, **kwargs) as resp:
            log.debug("Status {}".format(resp.status))
            message = "Bad status {}".format(resp.status)
            if resp.status in retry:
                raise ScriptWorkerRetryException(message)
            if resp.status not in good:
                raise ScriptWorkerException(message)
            if return_type == 'text':
                return await resp.text()
            elif return_type == 'json':
                return await resp.json()
            else:
                return resp
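
A hypothetical use of this wrapper: fetch a task definition as parsed JSON, letting the defaults retry on the 500-511 status range. The endpoint layout here is an assumption for illustration:

async def fetch_task_definition(context, task_id):
    # Assumed endpoint layout; substitute the real queue root URL.
    url = "https://queue.taskcluster.net/v1/task/{}".format(task_id)
    return await request(context, url, return_type='json')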
Code example #8
File: script.py  Project: Callek/beetmoverscript
def get_destination_for_private_repack_path(context, manifest, full_path,
                                            locale):
    """Function to process the final destination path, relative to the root of
    the S3 bucket. Depending on whether it's a private or public destination, it
    performs several string manipulations.

    Input: 'releng/partner/ghost/ghost-var/v1/linux-i686/ro/target.tar.bz2'
    Possible output(s):
        -> ghost/59.0b20-2/ghost-variant/v1/linux-i686/en-US/firefox-59.0b20.tar.bz2
        -> pub/firefox/candidates/59.0b20-candidates/build2/partner-repacks/ghost/ghost-variant/v1/linux-i686/en-US/firefox-59.0b20.tar.bz2
    """
    # make sure we're calling this function from private-partner context
    if not is_partner_action(context.action):
        raise ScriptWorkerRetryException("Outside of private-partner context!")

    # pretty name the `target` part to the actual filename
    pretty_full_path = os.path.join(
        os.path.dirname(full_path),
        manifest['mapping'][locale][os.path.basename(full_path)])
    # get rid of leading "releng/partner"
    if pretty_full_path.startswith(PARTNER_LEADING_STRING):
        pretty_full_path = pretty_full_path[len(PARTNER_LEADING_STRING):]

    build_number = context.task['payload']['build_number']
    version = context.task['payload']['version']

    if is_partner_private_task(context):
        elements = pretty_full_path.split('/')
        identifier = '{version}-{build_number}'.format(
            version=version, build_number=build_number)
        # we need to manually insert the "version-buildno" identifier in
        # between `partner` and `partner-variant` to be consistent
        elements.insert(1, identifier)
        # TODO: potentially need to remove the `v1` from the path?
        path = '/'.join(elements)
        # XXX: temp hack until bug 1447673 is solved
        if context.bucket == "dep":
            prefix = PARTNER_REPACK_PUBLIC_PREFIX_TMPL.format(
                version=version, build_number=build_number)
            path = os.path.join(prefix, path)
        return path
    elif is_partner_public_task(context):
        prefix = PARTNER_REPACK_PUBLIC_PREFIX_TMPL.format(
            version=version, build_number=build_number)
        return os.path.join(prefix, pretty_full_path)
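
To make the private branch concrete, here is a worked trace of the docstring's first sample output; the value of PARTNER_LEADING_STRING is assumed from the "get rid of leading releng/partner" comment:

PARTNER_LEADING_STRING = "releng/partner/"  # assumed from the comment above

pretty_full_path = "ghost/ghost-variant/v1/linux-i686/en-US/firefox-59.0b20.tar.bz2"
elements = pretty_full_path.split('/')
elements.insert(1, "59.0b20-2")  # the "{version}-{build_number}" identifier
assert '/'.join(elements) == (
    "ghost/59.0b20-2/ghost-variant/v1/linux-i686/en-US/firefox-59.0b20.tar.bz2"
)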
Code example #9
async def create_artifact(context,
                          path,
                          storage_type='s3',
                          expires=None,
                          content_type=None):
    """Create an artifact and upload it.  This should support s3 and azure
    out of the box; we'll need some tweaking if we want to support
    redirect/error artifacts.
    """
    temp_queue = get_temp_queue(context)
    filename = os.path.basename(path)
    payload = {
        "storageType": storage_type,
        "expires": expires or get_expiration_arrow(context).isoformat(),
        "contentType": content_type or guess_content_type(path),
    }
    target_path = "public/env/{}".format(filename)
    args = [
        context.claim_task['status']['taskId'], context.claim_task['runId'],
        target_path, payload
    ]
    tc_response = await temp_queue.createArtifact(*args)
    headers = {
        aiohttp.hdrs.CONTENT_TYPE: tc_response['contentType'],
    }
    skip_auto_headers = [aiohttp.hdrs.CONTENT_TYPE]
    log.info("uploading {path} to {url}...".format(path=path,
                                                   url=tc_response['putUrl']))
    with open(path, "rb") as fh:
        with aiohttp.Timeout(context.config['artifact_upload_timeout']):
            async with context.session.put(tc_response['putUrl'],
                                           data=fh,
                                           headers=headers,
                                           skip_auto_headers=skip_auto_headers,
                                           compress=False) as resp:
                log.info(resp.status)
                response_text = await resp.text()
                log.info(response_text)
                if resp.status not in (200, 204):
                    raise ScriptWorkerRetryException(
                        "Bad status {}".format(resp.status), )
Code example #10
async def fail_first(*args, **kwargs):
    global retry_count
    retry_count["fail_first"] += 1
    if retry_count["fail_first"] < 2:
        raise ScriptWorkerRetryException("first")
    return "yay"
Code example #11
File: test_poll.py  Project: indygreg/scriptworker
async def req(_, url, **kwargs):
    if url == "poll":
        return await fake_request()
    if raises:
        raise ScriptWorkerRetryException("died in req")
Code example #12
File: test_poll.py  Project: indygreg/scriptworker
async def claim(*args, **kwargs):
    if counters['claim_task']:
        return None
    counters['claim_task'] += 1
    raise ScriptWorkerRetryException("died in claim")