Example #1
def main(env):
    taskcluster.auth()
    hooks = taskcluster.get_service("hooks")

    # List all tasks for the environment
    all_tasks = list_tasks(env)

    # Collect the diff phids that already have a non-erroneous task
    skip_phids = [t["diff_phid"] for t in filter(is_not_error, all_tasks)]

    # Get tasks with a mach failure
    tasks = list(filter(is_mach_failure, all_tasks))

    # Trigger all mach error tasks
    total = 0
    for task in tasks:
        phid = task["diff_phid"]
        print("Triggering {} > {}".format(phid, task["title"]))

        if phid in skip_phids:
            print(
                ">> Skipping, phid {} already has a non-erroneous task".format(phid)
            )
            continue

        extra_env = {"ANALYSIS_SOURCE": "phabricator", "ANALYSIS_ID": phid}
        hook_task = hooks.triggerHook(
            "project-releng",
            "services-{}-staticanalysis/bot".format(env),
            extra_env,
        )
        print(">> New task {}".format(hook_task["status"]["taskId"]))
        total += 1

    print("Triggered {} tasks".format(total))
Example #2
    def __init__(self, configuration):
        (self.emails, ) = self.requires(configuration, "emails")
        assert len(self.emails) > 0, "Missing emails data"

        # Load TC services & secrets
        self.notify = taskcluster.get_service("notify")

        logger.info("Mail report enabled", emails=self.emails)
Example #3
    @staticmethod
    def matches(task_id):
        """
        Check if the default task can work on a task
        * Lookup the available latest artifacts
        * Check if any artifact matches the official default path
        """
        queue = taskcluster.get_service("queue")
        result = queue.listLatestArtifacts(task_id)
        if "artifacts" not in result:
            return False

        names = {artifact["name"] for artifact in result["artifacts"]}
        return len(names.intersection(DefaultTask.artifacts)) > 0
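DefaultTask.artifacts is referenced but not defined in the snippet; it is expected to be a collection of artifact names. A minimal usage sketch with illustrative values (the artifact path and task id are hypothetical):

# Hypothetical artifact names; the real values are not shown above
DefaultTask.artifacts = {"public/code-review/issues.json"}

if DefaultTask.matches("fDt4Rr2SQ9uSLXgrOdsqsA"):
    print("task produces a recognized default artifact")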
Example #4
    def __init__(self, configuration):
        # Load TC services
        self.notify = taskcluster.get_service("notify")

        logger.info("BuildErrorsReporter report enabled.")
Example #5
def main():
    args = parse_cli()
    taskcluster.auth(args.taskcluster_client_id, args.taskcluster_access_token)

    taskcluster.load_secrets(
        args.taskcluster_secret,
        prefixes=["common", "code-review-bot", "bot"],
        required=(
            "APP_CHANNEL",
            "REPORTERS",
            "PHABRICATOR",
            "ALLOWED_PATHS",
            "repositories",
        ),
        existing={
            "APP_CHANNEL": "development",
            "REPORTERS": [],
            "ZERO_COVERAGE_ENABLED": True,
            "ALLOWED_PATHS": ["*"],
            "task_failures_ignored": [],
        },
        local_secrets=yaml.safe_load(args.configuration)
        if args.configuration
        else None,
    )
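    # The local configuration, when provided, is parsed with yaml.safe_load;
    # given the prefixes above, it is presumably a mapping of the form
    # {"common": {...}, "code-review-bot": {...}, "bot": {...}} (illustrative)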

    init_logger(
        "bot",
        channel=taskcluster.secrets.get("APP_CHANNEL", "dev"),
        PAPERTRAIL_HOST=taskcluster.secrets.get("PAPERTRAIL_HOST"),
        PAPERTRAIL_PORT=taskcluster.secrets.get("PAPERTRAIL_PORT"),
        SENTRY_DSN=taskcluster.secrets.get("SENTRY_DSN"),
    )

    # Set up settings before stats
    settings.setup(
        taskcluster.secrets["APP_CHANNEL"],
        taskcluster.secrets["ALLOWED_PATHS"],
        taskcluster.secrets["repositories"],
    )
    # Set up statistics
    influx_conf = taskcluster.secrets.get("influxdb")
    if influx_conf:
        stats.auth(influx_conf)

    # Load reporters
    reporters = get_reporters(taskcluster.secrets["REPORTERS"])

    # Load index service
    index_service = taskcluster.get_service("index")

    # Load queue service
    queue_service = taskcluster.get_service("queue")

    # Load Phabricator API
    phabricator = taskcluster.secrets["PHABRICATOR"]
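    # The PHABRICATOR secret is expected to carry at least "url" and
    # "api_key", as read below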
    phabricator_reporting_enabled = "phabricator" in reporters
    phabricator_api = PhabricatorAPI(phabricator["api_key"], phabricator["url"])
    if phabricator_reporting_enabled:
        reporters["phabricator"].setup_api(phabricator_api)

    # Load unique revision
    try:
        if settings.autoland_group_id:
            revision = Revision.from_autoland(
                queue_service.task(settings.autoland_group_id), phabricator_api
            )
        else:
            revision = Revision.from_try(
                queue_service.task(settings.try_task_id), phabricator_api
            )
    except Exception as e:
        # Report revision loading failures on production only
        # Testing or dev instances may use a different Phabricator
        # configuration that does not match all the pulse messages sent
        if settings.on_production:
            raise

        logger.info(
            "Failed to load revision",
            task=settings.try_task_id,
            error=str(e),
            phabricator=phabricator["url"],
        )
        return 1

    # Run workflow according to source
    w = Workflow(
        reporters,
        index_service,
        queue_service,
        phabricator_api,
        taskcluster.secrets["ZERO_COVERAGE_ENABLED"],
        # Update build status only when phabricator reporting is enabled
        update_build=phabricator_reporting_enabled,
        task_failures_ignored=taskcluster.secrets["task_failures_ignored"],
    )
    try:
        if revision.repository == REPO_AUTOLAND:
            w.ingest_autoland(revision)
        else:
            w.run(revision)
    except Exception as e:
        # Log errors to papertrail
        logger.error("Static analysis failure", revision=revision, error=e)

        # Index analysis state
        extras = {}
        if isinstance(e, AnalysisException):
            extras["error_code"] = e.code
            extras["error_message"] = str(e)
        w.index(revision, state="error", **extras)

        # Update Harbormaster status
        w.update_status(revision, state=BuildState.Fail)

        # Then raise to mark task as erroneous
        raise

    return 0
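Since main returns an exit code (0 on success, 1 when the revision cannot be loaded), the console entry point is typically wired as:

import sys

if __name__ == "__main__":
    sys.exit(main())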
Example #6
def main():
    args = parse_cli()
    taskcluster.auth(args.taskcluster_client_id, args.taskcluster_access_token)
    taskcluster.load_secrets(
        name=args.taskcluster_secret,
        project_name=config.PROJECT_NAME,
        required=("APP_CHANNEL", "REPORTERS", "PHABRICATOR", "ALLOWED_PATHS"),
        existing={
            "APP_CHANNEL": "development",
            "REPORTERS": [],
            "PUBLICATION": "IN_PATCH",
            "ZERO_COVERAGE_ENABLED": True,
            "ALLOWED_PATHS": ["*"],
        },
    )

    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=taskcluster.secrets.get("PAPERTRAIL_HOST"),
        PAPERTRAIL_PORT=taskcluster.secrets.get("PAPERTRAIL_PORT"),
        SENTRY_DSN=taskcluster.secrets.get("SENTRY_DSN"),
    )

    # Set up settings before stats
    settings.setup(
        taskcluster.secrets["APP_CHANNEL"],
        taskcluster.secrets["PUBLICATION"],
        taskcluster.secrets["ALLOWED_PATHS"],
    )
    # Set up statistics
    datadog_api_key = taskcluster.secrets.get("DATADOG_API_KEY")
    if datadog_api_key:
        stats.auth(datadog_api_key)

    # Load reporters
    reporters = get_reporters(taskcluster.secrets["REPORTERS"])

    # Load index service
    index_service = taskcluster.get_service("index")

    # Load queue service
    queue_service = taskcluster.get_service("queue")

    # Load Phabricator API
    phabricator = taskcluster.secrets["PHABRICATOR"]
    phabricator_reporting_enabled = "phabricator" in reporters
    phabricator_api = PhabricatorAPI(phabricator["api_key"], phabricator["url"])
    if phabricator_reporting_enabled:
        reporters["phabricator"].setup_api(phabricator_api)

    # Load unique revision
    revision = Revision(
        phabricator_api,
        try_task=queue_service.task(settings.try_task_id),
        # Update build status only when phabricator reporting is enabled
        update_build=phabricator_reporting_enabled,
    )

    # Run workflow according to source
    w = Workflow(
        reporters,
        index_service,
        queue_service,
        phabricator_api,
        taskcluster.secrets["ZERO_COVERAGE_ENABLED"],
    )
    try:
        w.run(revision)
    except Exception as e:
        # Log errors to papertrail
        logger.error("Static analysis failure", revision=revision, error=e)

        # Index analysis state
        extras = {}
        if isinstance(e, AnalysisException):
            extras["error_code"] = e.code
            extras["error_message"] = str(e)
        w.index(revision, state="error", **extras)

        # Update Harbormaster status
        revision.update_status(state=BuildState.Fail)

        # Then raise to mark task as erroneous
        raise