Example #1
def mock_config():
    """
    Mock configuration for bot
    Using try source
    """
    os.environ["TRY_TASK_ID"] = "remoteTryTask"
    os.environ["TRY_TASK_GROUP_ID"] = "remoteTryGroup"
    third_party = ["test/dummy/", "3rdparty/"]

    # Serve the clang-tidy configuration from a local mock file
    # (MOCK_DIR is defined in the surrounding test module)
    path = os.path.join(MOCK_DIR, "config.yaml")
    with open(path) as config_file:
        config_body = config_file.read()
    responses.add(
        responses.GET,
        "https://hg.mozilla.org/mozilla-central/raw-file/tip/tools/clang-tidy/config.yaml",
        body=config_body,
        content_type="text/plain",
    )
    # Serve the list of paths treated as third-party code
    responses.add(
        responses.GET,
        "https://hg.mozilla.org/mozilla-central/raw-file/tip/3rdparty.txt",
        body="\n".join(third_party),
        content_type="text/plain",
    )

    from code_review_bot.config import settings

    # Reset any previously loaded configuration, then configure the test
    # channel with IN_PATCH publication and a restricted list of allowed paths
    settings.config = None
    settings.setup("test", "IN_PATCH", ["dom/*", "tests/*.py", "test/*.c"])
    return settings
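
A hypothetical usage sketch for this fixture follows. It assumes the function is registered as a pytest fixture (e.g. in a conftest.py) and that settings.setup() exposes the try task id read from the TRY_TASK_ID environment variable, as suggested by Example #4 below; neither detail is shown in the snippet itself.

def test_try_task_settings(mock_config):
    # mock_config returns the shared settings object after setup()
    # (the try_task_id attribute is an assumption based on Example #4)
    assert mock_config.try_task_id == "remoteTryTask"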
Example #2
def test_publication(monkeypatch, mock_taskcluster_config):
    """
    Check a patch publication through Taskcluster services
    """

    # Set up the local config as if running in a real Taskcluster task with the proxy
    monkeypatch.setenv("TASK_ID", "fakeTaskId")
    monkeypatch.setenv("RUN_ID", "0")
    monkeypatch.setenv("TASKCLUSTER_PROXY_URL", "http://proxy")
    settings.setup("test", [])

    # Mock the storage response
    responses.add(
        responses.PUT,
        "http://storage.test/public/patch/mock-analyzer-test-improvement.diff",
        json={},
        headers={"ETag": "test123"},
    )

    patch = ImprovementPatch("mock-analyzer", "test-improvement", "This is good code")
    assert patch.url is None

    patch.publish()
    assert (
        patch.url
        == "https://firefox-ci-tc.services.mozilla.com/api/queue/v1/task/fakeTaskId/runs/0/artifacts/public/patch/mock-analyzer-test-improvement.diff"
    )

    # Check that the storage mock has been called
    assert [c.request.url for c in responses.calls] == [
        "http://storage.test/public/patch/mock-analyzer-test-improvement.diff"
    ]
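
The expected URL in the assertion follows the standard Taskcluster run-artifact layout. Below is a small sketch of how such a URL can be rebuilt from the task environment; artifact_url and the hard-coded root URL are illustrative assumptions, not part of the snippet.

import os

TASKCLUSTER_ROOT = "https://firefox-ci-tc.services.mozilla.com"


def artifact_url(path):
    # Build the public artifact URL for the current task run, e.g.
    # artifact_url("public/patch/mock-analyzer-test-improvement.diff")
    # reproduces the URL asserted above when TASK_ID=fakeTaskId and RUN_ID=0
    task_id = os.environ["TASK_ID"]
    run_id = os.environ["RUN_ID"]
    return f"{TASKCLUSTER_ROOT}/api/queue/v1/task/{task_id}/runs/{run_id}/artifacts/{path}"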
Example #3
def mock_config():
    """
    Mock configuration for bot
    Using try source
    """
    # Make sure we are running in local mode
    if "TASK_ID" in os.environ:
        del os.environ["TASK_ID"]
    # Simulate a bot triggered from a try push
    os.environ["TRY_TASK_ID"] = "remoteTryTask"
    os.environ["TRY_TASK_GROUP_ID"] = "remoteTryGroup"
    settings.setup("test", ["dom/*", "tests/*.py", "test/*.c"])
    return settings
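
An alternative sketch of the same fixture using pytest's monkeypatch, which restores the environment automatically after each test. This variant is not part of the original code and assumes the same module-level settings import used by the snippet above.

import pytest


@pytest.fixture
def mock_config(monkeypatch):
    # Drop TASK_ID so the bot runs in local mode; monkeypatch restores it afterwards
    monkeypatch.delenv("TASK_ID", raising=False)
    monkeypatch.setenv("TRY_TASK_ID", "remoteTryTask")
    monkeypatch.setenv("TRY_TASK_GROUP_ID", "remoteTryGroup")
    settings.setup("test", ["dom/*", "tests/*.py", "test/*.c"])
    return settings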
Example #4
def main():
    # Authenticate against Taskcluster with the credentials passed on the CLI
    args = parse_cli()
    taskcluster.auth(args.taskcluster_client_id, args.taskcluster_access_token)

    # Load the runtime secrets, with development defaults and optional local YAML overrides
    taskcluster.load_secrets(
        args.taskcluster_secret,
        prefixes=["common", "code-review-bot", "bot"],
        required=(
            "APP_CHANNEL",
            "REPORTERS",
            "PHABRICATOR",
            "ALLOWED_PATHS",
            "repositories",
        ),
        existing={
            "APP_CHANNEL": "development",
            "REPORTERS": [],
            "ZERO_COVERAGE_ENABLED": True,
            "ALLOWED_PATHS": ["*"],
            "task_failures_ignored": [],
        },
        local_secrets=yaml.safe_load(args.configuration)
        if args.configuration
        else None,
    )

    init_logger(
        "bot",
        channel=taskcluster.secrets.get("APP_CHANNEL", "dev"),
        PAPERTRAIL_HOST=taskcluster.secrets.get("PAPERTRAIL_HOST"),
        PAPERTRAIL_PORT=taskcluster.secrets.get("PAPERTRAIL_PORT"),
        SENTRY_DSN=taskcluster.secrets.get("SENTRY_DSN"),
    )

    # Set up settings before stats
    settings.setup(
        taskcluster.secrets["APP_CHANNEL"],
        taskcluster.secrets["ALLOWED_PATHS"],
        taskcluster.secrets["repositories"],
    )
    # Set up statistics
    influx_conf = taskcluster.secrets.get("influxdb")
    if influx_conf:
        stats.auth(influx_conf)

    # Load reporters
    reporters = get_reporters(taskcluster.secrets["REPORTERS"])

    # Load index service
    index_service = taskcluster.get_service("index")

    # Load queue service
    queue_service = taskcluster.get_service("queue")

    # Load Phabricator API
    phabricator = taskcluster.secrets["PHABRICATOR"]
    phabricator_reporting_enabled = "phabricator" in reporters
    phabricator_api = PhabricatorAPI(phabricator["api_key"], phabricator["url"])
    if phabricator_reporting_enabled:
        reporters["phabricator"].setup_api(phabricator_api)

    # Load the unique revision under analysis, from autoland or try
    try:
        if settings.autoland_group_id:
            revision = Revision.from_autoland(
                queue_service.task(settings.autoland_group_id), phabricator_api
            )
        else:
            revision = Revision.from_try(
                queue_service.task(settings.try_task_id), phabricator_api
            )
    except Exception as e:
        # Report revision loading failures on production only.
        # Testing or dev instances can use a different Phabricator
        # configuration that does not match all the pulse messages sent.
        if settings.on_production:
            raise

        logger.info(
            "Failed to load revision",
            task=settings.try_task_id,
            error=str(e),
            phabricator=phabricator["url"],
        )
        return 1

    # Run workflow according to source
    w = Workflow(
        reporters,
        index_service,
        queue_service,
        phabricator_api,
        taskcluster.secrets["ZERO_COVERAGE_ENABLED"],
        # Update build status only when phabricator reporting is enabled
        update_build=phabricator_reporting_enabled,
        task_failures_ignored=taskcluster.secrets["task_failures_ignored"],
    )
    try:
        if revision.repository == REPO_AUTOLAND:
            w.ingest_autoland(revision)
        else:
            w.run(revision)
    except Exception as e:
        # Log errors to papertrail
        logger.error("Static analysis failure", revision=revision, error=e)

        # Index analysis state
        extras = {}
        if isinstance(e, AnalysisException):
            extras["error_code"] = e.code
            extras["error_message"] = str(e)
        w.index(revision, state="error", **extras)

        # Update Harbormaster status
        w.update_status(revision, state=BuildState.Fail)

        # Then raise to mark task as erroneous
        raise

    return 0
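
Since main() returns 0 on success and 1 when the revision cannot be loaded, the usual entry-point wiring would look like this (an assumption about the surrounding module, not shown in the snippet):

import sys

if __name__ == "__main__":
    sys.exit(main())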
Example #5
def main():
    # Authenticate against Taskcluster with the credentials passed on the CLI
    args = parse_cli()
    taskcluster.auth(args.taskcluster_client_id, args.taskcluster_access_token)

    # Load the runtime secrets, with defaults for the development channel
    taskcluster.load_secrets(
        name=args.taskcluster_secret,
        project_name=config.PROJECT_NAME,
        required=("APP_CHANNEL", "REPORTERS", "PHABRICATOR", "ALLOWED_PATHS"),
        existing={
            "APP_CHANNEL": "development",
            "REPORTERS": [],
            "PUBLICATION": "IN_PATCH",
            "ZERO_COVERAGE_ENABLED": True,
            "ALLOWED_PATHS": ["*"],
        },
    )

    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=taskcluster.secrets.get("PAPERTRAIL_HOST"),
        PAPERTRAIL_PORT=taskcluster.secrets.get("PAPERTRAIL_PORT"),
        SENTRY_DSN=taskcluster.secrets.get("SENTRY_DSN"),
    )

    # Set up settings before stats
    settings.setup(
        taskcluster.secrets["APP_CHANNEL"],
        taskcluster.secrets["PUBLICATION"],
        taskcluster.secrets["ALLOWED_PATHS"],
    )
    # Set up statistics
    datadog_api_key = taskcluster.secrets.get("DATADOG_API_KEY")
    if datadog_api_key:
        stats.auth(datadog_api_key)

    # Load reporters
    reporters = get_reporters(taskcluster.secrets["REPORTERS"])

    # Load index service
    index_service = taskcluster.get_service("index")

    # Load queue service
    queue_service = taskcluster.get_service("queue")

    # Load Phabricator API
    phabricator = taskcluster.secrets["PHABRICATOR"]
    phabricator_reporting_enabled = "phabricator" in reporters
    phabricator_api = PhabricatorAPI(phabricator["api_key"], phabricator["url"])
    if phabricator_reporting_enabled:
        reporters["phabricator"].setup_api(phabricator_api)

    # Load the unique revision under analysis from the try task
    revision = Revision(
        phabricator_api,
        try_task=queue_service.task(settings.try_task_id),
        # Update build status only when phabricator reporting is enabled
        update_build=phabricator_reporting_enabled,
    )

    # Run workflow according to source
    w = Workflow(
        reporters,
        index_service,
        queue_service,
        phabricator_api,
        taskcluster.secrets["ZERO_COVERAGE_ENABLED"],
    )
    try:
        w.run(revision)
    except Exception as e:
        # Log errors to papertrail
        logger.error("Static analysis failure", revision=revision, error=e)

        # Index analysis state
        extras = {}
        if isinstance(e, AnalysisException):
            extras["error_code"] = e.code
            extras["error_message"] = str(e)
        w.index(revision, state="error", **extras)

        # Update Harbormaster status
        revision.update_status(state=BuildState.Fail)

        # Then raise to mark task as erroneous
        raise
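
The error indexing above relies on AnalysisException exposing a machine-readable code attribute. Below is a minimal sketch of such an exception, assuming a (code, message) constructor; the project's actual definition may differ.

class AnalysisException(Exception):
    """Exception carrying an error code used when indexing a failed analysis."""

    def __init__(self, code, message):
        super().__init__(message)
        self.code = code


# Raising AnalysisException("watchdog", "Analysis timed out") in the workflow
# would be indexed with error_code="watchdog" and error_message="Analysis timed out"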