Example #1
def main():
    args = parse_cli()

    # Authenticate against Taskcluster with the credentials passed on the CLI
    taskcluster_config.auth(args.taskcluster_client_id,
                            args.taskcluster_access_token)

    # Load the "events" secrets: safe defaults via `existing`, plus an
    # optional local YAML configuration passed as `local_secrets`
    taskcluster_config.load_secrets(
        args.taskcluster_secret,
        "events",
        required=("admins", "PHABRICATOR", "repositories"),
        existing=dict(
            admins=["*****@*****.**", "*****@*****.**"],
            repositories=[],
            user_blacklist=[],
        ),
        local_secrets=yaml.safe_load(args.configuration)
        if args.configuration else None,
    )

    # Configure logging, optionally forwarding to Papertrail and Sentry
    init_logger(
        "events",
        channel=taskcluster_config.secrets.get("APP_CHANNEL", "dev"),
        PAPERTRAIL_HOST=taskcluster_config.secrets.get("PAPERTRAIL_HOST"),
        PAPERTRAIL_PORT=taskcluster_config.secrets.get("PAPERTRAIL_PORT"),
        SENTRY_DSN=taskcluster_config.secrets.get("SENTRY_DSN"),
    )

    # Build and run the Events handler
    events = Events(args.cache_root)
    events.run()
Example #2
def main():
    args = parse_cli()
    taskcluster_config.auth(args.taskcluster_client_id,
                            args.taskcluster_access_token)
    taskcluster_config.load_secrets(
        args.taskcluster_secret,
        prefixes=["common", "events"],
        required=("admins", "PHABRICATOR", "repositories"),
        existing=dict(
            APP_CHANNEL="development",
            admins=["*****@*****.**", "*****@*****.**"],
            repositories=[],
            user_blacklist=[],
            autoland_enabled=False,
            skippable_files=[],
        ),
        local_secrets=yaml.safe_load(args.configuration)
        if args.configuration else None,
    )

    # If credentials for the community Taskcluster instance are provided,
    # authenticate against it as well
    community_config = taskcluster_config.secrets.get("taskcluster_community")
    if community_config is not None:
        community_taskcluster_config.auth(community_config["client_id"],
                                          community_config["access_token"])

    init_logger(
        "events",
        channel=taskcluster_config.secrets.get("APP_CHANNEL", "dev"),
        PAPERTRAIL_HOST=taskcluster_config.secrets.get("PAPERTRAIL_HOST"),
        PAPERTRAIL_PORT=taskcluster_config.secrets.get("PAPERTRAIL_PORT"),
        SENTRY_DSN=taskcluster_config.secrets.get("SENTRY_DSN"),
    )

    events = Events(args.cache_root)
    events.run()
Example #3
def main():

    args = parse_cli()
    taskcluster.auth(args.taskcluster_client_id, args.taskcluster_access_token)

    taskcluster.load_secrets(
        args.taskcluster_secret,
        prefixes=["common", "code-review-bot", "bot"],
        required=(
            "APP_CHANNEL",
            "REPORTERS",
            "PHABRICATOR",
            "ALLOWED_PATHS",
            "repositories",
        ),
        existing={
            "APP_CHANNEL": "development",
            "REPORTERS": [],
            "ZERO_COVERAGE_ENABLED": True,
            "ALLOWED_PATHS": ["*"],
            "task_failures_ignored": [],
        },
        local_secrets=yaml.safe_load(args.configuration)
        if args.configuration
        else None,
    )

    init_logger(
        "bot",
        channel=taskcluster.secrets.get("APP_CHANNEL", "dev"),
        PAPERTRAIL_HOST=taskcluster.secrets.get("PAPERTRAIL_HOST"),
        PAPERTRAIL_PORT=taskcluster.secrets.get("PAPERTRAIL_PORT"),
        SENTRY_DSN=taskcluster.secrets.get("SENTRY_DSN"),
    )

    # Set up settings before stats
    settings.setup(
        taskcluster.secrets["APP_CHANNEL"],
        taskcluster.secrets["ALLOWED_PATHS"],
        taskcluster.secrets["repositories"],
    )
    # Set up statistics
    influx_conf = taskcluster.secrets.get("influxdb")
    if influx_conf:
        stats.auth(influx_conf)

    # Load reporters
    reporters = get_reporters(taskcluster.secrets["REPORTERS"])

    # Load index service
    index_service = taskcluster.get_service("index")

    # Load queue service
    queue_service = taskcluster.get_service("queue")

    # Load Phabricator API
    phabricator = taskcluster.secrets["PHABRICATOR"]
    phabricator_reporting_enabled = "phabricator" in reporters
    phabricator_api = PhabricatorAPI(phabricator["api_key"], phabricator["url"])
    if phabricator_reporting_enabled:
        reporters["phabricator"].setup_api(phabricator_api)

    # Load unique revision
    try:
        if settings.autoland_group_id:
            revision = Revision.from_autoland(
                queue_service.task(settings.autoland_group_id), phabricator_api
            )
        else:
            revision = Revision.from_try(
                queue_service.task(settings.try_task_id), phabricator_api
            )
    except Exception as e:
        # Report revision loading failures on production only.
        # Testing and dev instances may use a different Phabricator
        # configuration that does not match all the pulse messages sent.
        if settings.on_production:
            raise

        logger.info(
            "Failed to load revision",
            task=settings.try_task_id,
            error=str(e),
            phabricator=phabricator["url"],
        )
        return 1

    # Run workflow according to source
    w = Workflow(
        reporters,
        index_service,
        queue_service,
        phabricator_api,
        taskcluster.secrets["ZERO_COVERAGE_ENABLED"],
        # Update build status only when phabricator reporting is enabled
        update_build=phabricator_reporting_enabled,
        task_failures_ignored=taskcluster.secrets["task_failures_ignored"],
    )
    try:
        if revision.repository == REPO_AUTOLAND:
            w.ingest_autoland(revision)
        else:
            w.run(revision)
    except Exception as e:
        # Log errors to papertrail
        logger.error("Static analysis failure", revision=revision, error=e)

        # Index analysis state
        extras = {}
        if isinstance(e, AnalysisException):
            extras["error_code"] = e.code
            extras["error_message"] = str(e)
        w.index(revision, state="error", **extras)

        # Update Harbormaster status
        w.update_status(revision, state=BuildState.Fail)

        # Then raise to mark task as erroneous
        raise

    return 0
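
All three snippets call a parse_cli() helper that is not shown on this page. Below is only a minimal sketch of what such a helper might look like, with the option names inferred from the attributes accessed above (taskcluster_secret, taskcluster_client_id, taskcluster_access_token, configuration, cache_root); the real helper in the upstream project may differ.

import argparse
import sys


def parse_cli():
    # Hypothetical reconstruction: option names are only inferred from the
    # attributes used in the examples above, not taken from the real project
    parser = argparse.ArgumentParser(description="code-review entry point")
    parser.add_argument("--taskcluster-secret", default=None,
                        help="Name of the Taskcluster secret to load")
    parser.add_argument("--taskcluster-client-id", default=None,
                        help="Taskcluster client ID")
    parser.add_argument("--taskcluster-access-token", default=None,
                        help="Taskcluster access token")
    parser.add_argument("--configuration", type=argparse.FileType("r"), default=None,
                        help="Local YAML file passed to yaml.safe_load()")
    parser.add_argument("--cache-root", default=None,
                        help="Cache root directory used by Events")
    return parser.parse_args()


if __name__ == "__main__":
    # Wire up whichever main() from the examples above is defined in the module;
    # sys.exit() propagates the integer return code (or 0 when main() returns None)
    sys.exit(main())

Since argparse converts dashes in option names to underscores, --cache-root becomes args.cache_root, matching the attribute names used in the snippets.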