Example #1
    def upload(self, report, changesets):
        results = self.generate(report, changesets)

        if secrets[secrets.PHABRICATOR_ENABLED]:
            phabricator = PhabricatorAPI(secrets[secrets.PHABRICATOR_TOKEN],
                                         secrets[secrets.PHABRICATOR_URL])
        else:
            phabricator = None

        for rev_id, coverage in results.items():
            # Only upload raw coverage data to Phabricator, not stats
            coverage = {
                path: cov["coverage"]
                for path, cov in coverage.items()
            }
            logger.info("{} coverage: {}".format(rev_id, coverage))

            if not phabricator or not coverage:
                continue

            try:
                rev_data = phabricator.load_revision(rev_id=rev_id)
                phabricator.upload_coverage_results(
                    rev_data["fields"]["diffPHID"], coverage)
                # XXX: This is only necessary until https://bugzilla.mozilla.org/show_bug.cgi?id=1487843 is resolved.
                phabricator.upload_lint_results(rev_data["fields"]["diffPHID"],
                                                BuildState.Pass, [])
            except PhabricatorRevisionNotFoundException:
                logger.warn("Phabricator revision not found", rev_id=rev_id)

        return results
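For context, a hedged sketch of the data shape this method is assumed to consume: generate() returns a mapping from revision id to per-file entries, and the comprehension above keeps only the "coverage" value while dropping any extra statistics. The file path, coverage string, and stats key below are illustrative placeholders, not values taken from the original project.

# Illustrative (assumed) shape of the `results` mapping consumed by upload():
results = {
    12345: {                           # Phabricator revision id
        "dom/base/nsDocument.cpp": {
            "coverage": "NNCUCCU",     # per-line coverage string forwarded to Phabricator
            "lines_covered": 4,        # assumed extra stat, stripped before upload
        },
    },
}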
Example #2
def get(rev_id):
    assert DEPLOYMENT_URL is not None
    assert API_KEY is not None

    phabricator_api = PhabricatorAPI(API_KEY, DEPLOYMENT_URL)
    return phabricator_api.load_revision(rev_id=rev_id,
                                         attachments={"projects": True})
Example #3
    def apply_phab(self, hg, diff_id):
        phabricator_api = PhabricatorAPI(
            api_key=get_secret("PHABRICATOR_TOKEN"),
            url=get_secret("PHABRICATOR_URL"))

        diffs = phabricator_api.search_diffs(diff_id=diff_id)
        assert len(diffs) == 1, "No diff available for {}".format(diff_id)
        diff = diffs[0]

        # Get the stack of patches
        base, patches = phabricator_api.load_patches_stack(hg, diff)
        assert len(patches) > 0, "No patches to apply"

        # Load all the diff details along with their commit messages
        diffs = phabricator_api.search_diffs(diff_phid=[p[0] for p in patches],
                                             attachments={"commits": True})
        commits = {
            diff["phid"]: diff["attachments"]["commits"].get("commits", [])
            for diff in diffs
        }

        # First apply patches on local repo
        for diff_phid, patch in patches:
            commit = commits.get(diff_phid)

            message = ""
            if commit:
                message += "{}\n".format(commit[0]["message"])

            logger.info(f"Applying {diff_phid}")
            hg.import_(
                patches=io.BytesIO(patch.encode("utf-8")),
                message=message,
                user="******",
            )
Example #4
def main(taskcluster_secret,
         taskcluster_client_id,
         taskcluster_access_token,
         cache_root,
         phab_build_target,
         ):

    secrets = get_secrets(taskcluster_secret,
                          config.PROJECT_NAME,
                          required=(
                              'PULSE_USER',
                              'PULSE_PASSWORD',
                              'HOOKS',
                              'ADMINS',
                              'PHABRICATOR',
                              'repositories',
                          ),
                          existing=dict(
                              HOOKS=[],
                              ADMINS=['*****@*****.**', '*****@*****.**'],
                              repositories=[]
                          ),
                          taskcluster_client_id=taskcluster_client_id,
                          taskcluster_access_token=taskcluster_access_token,
                          )

    init_logger(config.PROJECT_NAME,
                PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
                PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
                SENTRY_DSN=secrets.get('SENTRY_DSN'),
                MOZDEF=secrets.get('MOZDEF'),
                )

    task_monitoring.emails = secrets['ADMINS']

    phabricator = PhabricatorAPI(
        api_key=secrets['PHABRICATOR']['token'],
        url=secrets['PHABRICATOR']['url'],
    )

    pl = PulseListener(secrets['PULSE_USER'],
                       secrets['PULSE_PASSWORD'],
                       secrets['HOOKS'],
                       secrets['repositories'],
                       phabricator,
                       cache_root,
                       secrets['PHABRICATOR'].get('publish', False),
                       taskcluster_client_id,
                       taskcluster_access_token,
                       )
    click.echo('Listening to pulse messages...')

    if phab_build_target:
        pl.add_build(phab_build_target)

    pl.run()
Example #5
    def apply_phab(self, hg, diff_id):
        phabricator_api = PhabricatorAPI(
            api_key=get_secret("PHABRICATOR_TOKEN"),
            url=get_secret("PHABRICATOR_URL"))

        diffs = phabricator_api.search_diffs(diff_id=diff_id)
        assert len(diffs) == 1, f"No diff available for {diff_id}"
        diff = diffs[0]

        # Get the stack of patches
        base, patches = phabricator_api.load_patches_stack(hg, diff)
        assert len(patches) > 0, "No patches to apply"

        # Load all the diff details along with their commit messages
        diffs = phabricator_api.search_diffs(diff_phid=[p[0] for p in patches],
                                             attachments={"commits": True})

        diffs_data = {}
        for diff in diffs:
            revision = phabricator_api.load_revision(
                rev_phid=diff["revisionPHID"])
            logger.info("Diff {} linked to Revision {}".format(
                diff["id"], revision["id"]))

            diffs_data[diff["phid"]] = {
                "commits": diff["attachments"]["commits"].get("commits", []),
                "revision": revision,
            }

        # First apply patches on local repo
        for diff_phid, patch in patches:
            diff_data = diffs_data.get(diff_phid)

            commits = diff_data["commits"]
            revision = diff_data["revision"]

            if commits and commits[0]["message"]:
                message = commits[0]["message"]
            else:
                message = revision["fields"]["title"]

            logger.info(f"Applying {diff_phid}")
            hg.import_(
                patches=io.BytesIO(patch.encode("utf-8")),
                message=message,
                user="******",
            )
Example #6
    def __init__(self, url, api_key, retries=5, sleep=10):
        self.api = PhabricatorAPI(url=url, api_key=api_key)

        # Phabricator secure revision retries configuration
        assert isinstance(retries, int)
        assert isinstance(sleep, int)
        self.retries = collections.defaultdict(lambda: (retries, None))
        self.sleep = sleep
        logger.info('Will retry Phabricator secure revision queries',
                    retries=retries,
                    sleep=sleep)

        # Load secure projects
        projects = self.api.search_projects(slugs=['secure-revision'])
        self.secure_projects = {
            p['phid']: p['fields']['name']
            for p in projects
        }
        logger.info('Loaded secure projects',
                    projects=self.secure_projects.values())
Example #7
    def __init__(self, url, api_key, retries=5, sleep=10):
        self.api = PhabricatorAPI(url=url, api_key=api_key)

        # Phabricator secure revision retries configuration
        assert isinstance(retries, int)
        assert isinstance(sleep, int)
        self.max_retries = retries
        self.retries = collections.defaultdict(lambda: (retries, None))
        self.sleep = sleep
        logger.info(
            "Will retry Phabricator secure revision queries",
            retries=retries,
            sleep=sleep,
        )

        # Load secure projects
        projects = self.api.search_projects(slugs=["secure-revision"])
        self.secure_projects = {
            p["phid"]: p["fields"]["name"]
            for p in projects
        }
        logger.info("Loaded secure projects",
                    projects=self.secure_projects.values())
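A hedged sketch of how the secure_projects map loaded above might be consulted later. This helper is not part of the original class; it assumes the caller already holds a revision's project PHIDs (for instance from a "projects" attachment as in Example #2).

    def is_secure(self, project_phids):
        # Hypothetical helper: treat a revision as secure when it is tagged
        # with at least one of the projects in self.secure_projects.
        return any(phid in self.secure_projects for phid in project_phids)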
Example #8
    def upload(self, report, changesets=None):
        results = self.generate(report, changesets)

        if secrets[secrets.PHABRICATOR_ENABLED]:
            phabricator = PhabricatorAPI(secrets[secrets.PHABRICATOR_TOKEN],
                                         secrets[secrets.PHABRICATOR_URL])
        else:
            phabricator = None

        for rev_id, coverage in results.items():
            logger.info('{} coverage: {}'.format(rev_id, coverage))

            if not phabricator or not coverage:
                continue

            try:
                rev_data = phabricator.load_revision(rev_id=rev_id)
                phabricator.upload_coverage_results(
                    rev_data['fields']['diffPHID'], coverage)
                # XXX: This is only necessary until https://bugzilla.mozilla.org/show_bug.cgi?id=1487843 is resolved.
                phabricator.upload_lint_results(rev_data['fields']['diffPHID'],
                                                BuildState.Pass, [])
            except PhabricatorRevisionNotFoundException:
                logger.warn('Phabricator revision not found', rev_id=rev_id)
Example #9
def main():

    args = parse_cli()
    taskcluster.auth(args.taskcluster_client_id, args.taskcluster_access_token)

    taskcluster.load_secrets(
        args.taskcluster_secret,
        prefixes=["common", "code-review-bot", "bot"],
        required=(
            "APP_CHANNEL",
            "REPORTERS",
            "PHABRICATOR",
            "ALLOWED_PATHS",
            "repositories",
        ),
        existing={
            "APP_CHANNEL": "development",
            "REPORTERS": [],
            "ZERO_COVERAGE_ENABLED": True,
            "ALLOWED_PATHS": ["*"],
            "task_failures_ignored": [],
        },
        local_secrets=yaml.safe_load(args.configuration)
        if args.configuration
        else None,
    )

    init_logger(
        "bot",
        channel=taskcluster.secrets.get("APP_CHANNEL", "dev"),
        PAPERTRAIL_HOST=taskcluster.secrets.get("PAPERTRAIL_HOST"),
        PAPERTRAIL_PORT=taskcluster.secrets.get("PAPERTRAIL_PORT"),
        SENTRY_DSN=taskcluster.secrets.get("SENTRY_DSN"),
    )

    # Setup settings before stats
    settings.setup(
        taskcluster.secrets["APP_CHANNEL"],
        taskcluster.secrets["ALLOWED_PATHS"],
        taskcluster.secrets["repositories"],
    )
    # Setup statistics
    influx_conf = taskcluster.secrets.get("influxdb")
    if influx_conf:
        stats.auth(influx_conf)

    # Load reporters
    reporters = get_reporters(taskcluster.secrets["REPORTERS"])

    # Load index service
    index_service = taskcluster.get_service("index")

    # Load queue service
    queue_service = taskcluster.get_service("queue")

    # Load Phabricator API
    phabricator = taskcluster.secrets["PHABRICATOR"]
    phabricator_reporting_enabled = "phabricator" in reporters
    phabricator_api = PhabricatorAPI(phabricator["api_key"], phabricator["url"])
    if phabricator_reporting_enabled:
        reporters["phabricator"].setup_api(phabricator_api)

    # Load unique revision
    try:
        if settings.autoland_group_id:
            revision = Revision.from_autoland(
                queue_service.task(settings.autoland_group_id), phabricator_api
            )
        else:
            revision = Revision.from_try(
                queue_service.task(settings.try_task_id), phabricator_api
            )
    except Exception as e:
        # Report revision loading failure on production only.
        # On testing or dev instances, we can use a different Phabricator
        # configuration that does not match all the pulse messages sent.
        if settings.on_production:
            raise
        else:
            logger.info(
                "Failed to load revision",
                task=settings.try_task_id,
                error=str(e),
                phabricator=phabricator["url"],
            )
            return 1

    # Run workflow according to source
    w = Workflow(
        reporters,
        index_service,
        queue_service,
        phabricator_api,
        taskcluster.secrets["ZERO_COVERAGE_ENABLED"],
        # Update build status only when phabricator reporting is enabled
        update_build=phabricator_reporting_enabled,
        task_failures_ignored=taskcluster.secrets["task_failures_ignored"],
    )
    try:
        if revision.repository == REPO_AUTOLAND:
            w.ingest_autoland(revision)
        else:
            w.run(revision)
    except Exception as e:
        # Log errors to papertrail
        logger.error("Static analysis failure", revision=revision, error=e)

        # Index analysis state
        extras = {}
        if isinstance(e, AnalysisException):
            extras["error_code"] = e.code
            extras["error_message"] = str(e)
        w.index(revision, state="error", **extras)

        # Update Harbormaster status
        w.update_status(revision, state=BuildState.Fail)

        # Then raise to mark task as erroneous
        raise

    return 0
Example #10
    def __init__(self):
        super(NotLanded, self).__init__()
        self.nweeks = utils.get_config(self.name(), "number_of_weeks", 2)
        self.nyears = utils.get_config(self.name(), "number_of_years", 2)
        self.phab = PhabricatorAPI(utils.get_login_info()["phab_api_key"])
        self.extra_ni = {}
Example #11
def set_api_key(url: str, api_key: str) -> None:
    global PHABRICATOR_API
    PHABRICATOR_API = PhabricatorAPI(api_key, url)
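A minimal usage sketch, assuming the module is imported and set_api_key is called once at startup; the URL and token are placeholder values, and load_revision is the same call used in the other examples.

# Hypothetical startup wiring followed by a query through the module-level client.
set_api_key("https://phabricator.example.com/api/", "api-xxxxxxxxxxxxxxxxxxxxxxxxxxxx")
revision = PHABRICATOR_API.load_revision(rev_id=12345)
print(revision["fields"]["title"])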
Example #12
    def apply_phab(self, hg, diff_id):
        def has_revision(revision):
            if not revision:
                return False
            try:
                hg.identify(revision)
                return True
            except hglib.error.CommandError:
                return False

        phabricator_api = PhabricatorAPI(
            api_key=get_secret("PHABRICATOR_TOKEN"), url=get_secret("PHABRICATOR_URL")
        )

        # Get the stack of patches
        stack = phabricator_api.load_patches_stack(diff_id)
        assert len(stack) > 0, "No patches to apply"

        # Find the first unknown base revision
        needed_stack = []
        revisions = {}
        for patch in reversed(stack):
            needed_stack.insert(0, patch)

            # Stop as soon as a base revision is available
            if has_revision(patch.base_revision):
                logger.info(
                    f"Stopping at diff {patch.id} and revision {patch.base_revision}"
                )
                break

        if not needed_stack:
            logger.info("All the patches are already applied")
            return

        # Load all the diff revisions
        diffs = phabricator_api.search_diffs(diff_phid=[p.phid for p in stack])
        revisions = {
            diff["phid"]: phabricator_api.load_revision(rev_phid=diff["revisionPHID"])
            for diff in diffs
        }

        # Update repo to base revision
        hg_base = needed_stack[0].base_revision
        if not has_revision(hg_base):
            logger.warning("Missing base revision {} from Phabricator".format(hg_base))
            hg_base = "tip"

        if hg_base:
            hg.update(rev=hg_base, clean=True)
            logger.info(f"Updated repo to {hg_base}")

            try:
                self.git_base = vcs_map.mercurial_to_git(hg_base)
                subprocess.run(
                    ["git", "checkout", "-b", "analysis_branch", self.git_base],
                    check=True,
                    cwd=self.git_repo_dir,
                )
                logger.info(f"Updated git repo to {self.git_base}")
            except Exception as e:
                logger.info(f"Updating git repo to Mercurial {hg_base} failed: {e}")

        for patch in needed_stack:
            revision = revisions[patch.phid]

            if patch.commits:
                message = patch.commits[0]["message"]
                author_name = patch.commits[0]["author"]["name"]
                author_email = patch.commits[0]["author"]["email"]
            else:
                message = revision["fields"]["title"]
                author_name = "bugbug"
                author_email = "*****@*****.**"

            logger.info(
                f"Applying {patch.phid} from revision {revision['id']}: {message}"
            )

            hg.import_(
                patches=io.BytesIO(patch.patch.encode("utf-8")),
                message=message.encode("utf-8"),
                user=f"{author_name} <{author_email}>".encode("utf-8"),
            )

            with tempfile.TemporaryDirectory() as tmpdirname:
                temp_file = os.path.join(tmpdirname, "temp.patch")
                with open(temp_file, "w") as f:
                    f.write(patch.patch)

                subprocess.run(
                    ["git", "apply", "--3way", temp_file],
                    check=True,
                    cwd=self.git_repo_dir,
                )
                subprocess.run(
                    [
                        "git",
                        "-c",
                        f"user.name={author_name}",
                        "-c",
                        f"user.email={author_email}",
                        "commit",
                        "-am",
                        message,
                    ],
                    check=True,
                    cwd=self.git_repo_dir,
                )
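A hedged call-site sketch for apply_phab above: `runner` is a placeholder for an instance of the class that defines the method and owns git_repo_dir, and the repository path and diff id are placeholders as well. hglib.open and close are the standard python-hglib entry points already implied by the hglib.error handling inside the method.

import hglib

# Hypothetical invocation of apply_phab(); `runner` and the path/id are placeholders.
hg = hglib.open("/path/to/mozilla-central")
try:
    runner.apply_phab(hg, diff_id=123456)
finally:
    hg.close()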
Example #13
def mock_phabricator(mock_config):
    '''
    Mock phabricator authentication process
    '''
    def _response(name):
        path = os.path.join(MOCK_DIR, 'phabricator_{}.json'.format(name))
        assert os.path.exists(path)
        return open(path).read()

    responses.add(
        responses.POST,
        'http://phabricator.test/api/user.whoami',
        body=_response('auth'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/differential.diff.search',
        body=_response('diff_search'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/differential.revision.search',
        body=_response('revision_search'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/differential.query',
        body=_response('diff_query'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/differential.getrawdiff',
        body=_response('diff_raw'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/differential.createinline',
        body=_response('createinline'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/edge.search',
        body=_response('edge_search'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/transaction.search',
        body=_response('transaction_search'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/harbormaster.target.search',
        body=_response('target_search'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/harbormaster.build.search',
        body=_response('build_search'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/harbormaster.buildable.search',
        body=_response('buildable_search'),
        content_type='application/json',
    )

    responses.add(
        responses.POST,
        'http://phabricator.test/api/harbormaster.sendmessage',
        body=_response('send_message'),
        content_type='application/json',
    )

    yield PhabricatorAPI(
        url='http://phabricator.test/api/',
        api_key='deadbeef',
    )
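A hedged test sketch using the fixture above, assuming it is registered with pytest and activated through the responses library (for example via @pytest.fixture and @responses.activate in conftest.py); the revision id and the asserted field depend on the JSON mocks under MOCK_DIR and are placeholders.

# Hypothetical pytest test body; every HTTP call hits the mocked endpoints above.
def test_load_revision(mock_phabricator):
    revision = mock_phabricator.load_revision(rev_id=1)
    assert revision["fields"]["diffPHID"].startswith("PHID-DIFF-")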
Example #14
def main():

    args = parse_cli()
    taskcluster.auth(args.taskcluster_client_id, args.taskcluster_access_token)
    taskcluster.load_secrets(
        name=args.taskcluster_secret,
        project_name=config.PROJECT_NAME,
        required=("APP_CHANNEL", "REPORTERS", "PHABRICATOR", "ALLOWED_PATHS"),
        existing={
            "APP_CHANNEL": "development",
            "REPORTERS": [],
            "PUBLICATION": "IN_PATCH",
            "ZERO_COVERAGE_ENABLED": True,
            "ALLOWED_PATHS": ["*"],
        },
    )

    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=taskcluster.secrets.get("PAPERTRAIL_HOST"),
        PAPERTRAIL_PORT=taskcluster.secrets.get("PAPERTRAIL_PORT"),
        SENTRY_DSN=taskcluster.secrets.get("SENTRY_DSN"),
    )

    # Setup settings before stats
    settings.setup(
        taskcluster.secrets["APP_CHANNEL"],
        taskcluster.secrets["PUBLICATION"],
        taskcluster.secrets["ALLOWED_PATHS"],
    )
    # Setup statistics
    datadog_api_key = taskcluster.secrets.get("DATADOG_API_KEY")
    if datadog_api_key:
        stats.auth(datadog_api_key)

    # Load reporters
    reporters = get_reporters(taskcluster.secrets["REPORTERS"])

    # Load index service
    index_service = taskcluster.get_service("index")

    # Load queue service
    queue_service = taskcluster.get_service("queue")

    # Load Phabricator API
    phabricator = taskcluster.secrets["PHABRICATOR"]
    phabricator_reporting_enabled = "phabricator" in reporters
    phabricator_api = PhabricatorAPI(phabricator["api_key"], phabricator["url"])
    if phabricator_reporting_enabled:
        reporters["phabricator"].setup_api(phabricator_api)

    # Load unique revision
    revision = Revision(
        phabricator_api,
        try_task=queue_service.task(settings.try_task_id),
        # Update build status only when phabricator reporting is enabled
        update_build=phabricator_reporting_enabled,
    )

    # Run workflow according to source
    w = Workflow(
        reporters,
        index_service,
        queue_service,
        phabricator_api,
        taskcluster.secrets["ZERO_COVERAGE_ENABLED"],
    )
    try:
        w.run(revision)
    except Exception as e:
        # Log errors to papertrail
        logger.error("Static analysis failure", revision=revision, error=e)

        # Index analysis state
        extras = {}
        if isinstance(e, AnalysisException):
            extras["error_code"] = e.code
            extras["error_message"] = str(e)
        w.index(revision, state="error", **extras)

        # Update Harbormaster status
        revision.update_status(state=BuildState.Fail)

        # Then raise to mark task as erroneous
        raise
Example #15
def build_phabricator_api(name: str, url: str, token: str):
    assert url.endswith("/api/"), f"{name} Url {url} does not end with /api/"
    assert token is not None, f"Missing {name} Phabricator token"
    assert len(token) == 32, f"{name} Phabricator token must be 32 characters long"
    return PhabricatorAPI(token, url)
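A minimal usage sketch exercising the checks above; the URL and 32-character token are placeholder values.

# Hypothetical call: the URL must end with /api/ and the token must be exactly
# 32 characters, otherwise the assertions in build_phabricator_api fire.
api = build_phabricator_api(
    "prod",
    "https://phabricator.example.com/api/",
    "api-" + "x" * 28,  # placeholder token, 32 characters long
)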
Example #16
    def apply_phab(self, hg, diff_id):
        def has_revision(revision):
            if not revision:
                return False
            try:
                hg.identify(revision)
                return True
            except hglib.error.CommandError:
                return False

        phabricator_api = PhabricatorAPI(
            api_key=get_secret("PHABRICATOR_TOKEN"),
            url=get_secret("PHABRICATOR_URL"))

        # Get the stack of patches
        stack = phabricator_api.load_patches_stack(diff_id)
        assert len(stack) > 0, "No patches to apply"

        # Find the first unknown base revision
        needed_stack = []
        revisions = {}
        for patch in reversed(stack):
            needed_stack.insert(0, patch)

            # Stop as soon as a base revision is available
            if has_revision(patch.base_revision):
                logger.info(
                    f"Stopping at diff {patch.id} and revision {patch.base_revision}"
                )
                break

        if not needed_stack:
            logger.info("All the patches are already applied")
            return

        # Load all the diff revisions
        diffs = phabricator_api.search_diffs(diff_phid=[p.phid for p in stack])
        revisions = {
            diff["phid"]:
            phabricator_api.load_revision(rev_phid=diff["revisionPHID"])
            for diff in diffs
        }

        # Update repo to base revision
        hg_base = needed_stack[0].base_revision
        if hg_base:
            hg.update(rev=hg_base, clean=True)
            logger.info(f"Updated repo to {hg_base}")

        for patch in needed_stack:
            revision = revisions[patch.phid]

            if patch.commits:
                message = patch.commits[0]["message"]
            else:
                message = revision["fields"]["title"]

            logger.info(
                f"Applying {patch.phid} from revision {revision['id']}: {message}"
            )

            hg.import_(
                patches=io.BytesIO(patch.patch.encode("utf-8")),
                message=message,
                user="******",
            )
Example #17
    def apply_phab(self, hg, diff_id):
        def has_revision(revision):
            if not revision:
                return False
            try:
                hg.identify(revision)
                return True
            except hglib.error.CommandError:
                return False

        phabricator_api = PhabricatorAPI(
            api_key=get_secret("PHABRICATOR_TOKEN"),
            url=get_secret("PHABRICATOR_URL"))

        # Get the stack of patches
        stack = phabricator_api.load_patches_stack(diff_id)
        assert len(stack) > 0, "No patches to apply"

        # Find the first unknown base revision
        needed_stack = []
        revisions = {}
        for patch in reversed(stack):
            needed_stack.insert(0, patch)

            # Stop as soon as a base revision is available
            if has_revision(patch.base_revision):
                logger.info(
                    f"Stopping at diff {patch.id} and revision {patch.base_revision}"
                )
                break

        if not needed_stack:
            logger.info("All the patches are already applied")
            return

        # Load all the diff revisions
        diffs = phabricator_api.search_diffs(diff_phid=[p.phid for p in stack])
        revisions = {
            diff["phid"]:
            phabricator_api.load_revision(rev_phid=diff["revisionPHID"],
                                          attachments={"reviewers": True})
            for diff in diffs
        }

        # Update repo to base revision
        hg_base = needed_stack[0].base_revision
        if not has_revision(hg_base):
            logger.warning(
                "Missing base revision {} from Phabricator".format(hg_base))
            hg_base = "tip"

        if hg_base:
            hg.update(rev=hg_base, clean=True)
            logger.info(f"Updated repo to {hg_base}")

            try:
                self.git_base = vcs_map.mercurial_to_git(hg_base)
                subprocess.run(
                    [
                        "git", "checkout", "-b", "analysis_branch",
                        self.git_base
                    ],
                    check=True,
                    cwd=self.git_repo_dir,
                )
                logger.info(f"Updated git repo to {self.git_base}")
            except Exception as e:
                logger.info(
                    f"Updating git repo to Mercurial {hg_base} failed: {e}")

        def load_user(phid):
            if phid.startswith("PHID-USER"):
                return phabricator_api.load_user(user_phid=phid)
            elif phid.startswith("PHID-PROJ"):
                # TODO: Support group reviewers somehow.
                logger.info(f"Skipping group reviewer {phid}")
            else:
                raise Exception(f"Unsupported reviewer {phid}")

        for patch in needed_stack:
            revision = revisions[patch.phid]

            message = "{}\n\n{}".format(revision["fields"]["title"],
                                        revision["fields"]["summary"])

            author_name = None
            author_email = None

            if patch.commits:
                author_name = patch.commits[0]["author"]["name"]
                author_email = patch.commits[0]["author"]["email"]

            if author_name is None:
                author = load_user(revision["fields"]["authorPHID"])
                author_name = author["fields"]["realName"]
                # XXX: Figure out a way to know the email address of the author.
                author_email = author["fields"]["username"]

            reviewers = list(
                filter(
                    None,
                    (load_user(reviewer["reviewerPHID"]) for reviewer in
                     revision["attachments"]["reviewers"]["reviewers"]),
                ))
            reviewers = set(reviewer["fields"]["username"]
                            for reviewer in reviewers)

            if len(reviewers):
                message = replace_reviewers(message, reviewers)

            logger.info(
                f"Applying {patch.phid} from revision {revision['id']}: {message}"
            )

            hg.import_(
                patches=io.BytesIO(patch.patch.encode("utf-8")),
                message=message.encode("utf-8"),
                user=f"{author_name} <{author_email}>".encode("utf-8"),
            )

            with tempfile.TemporaryDirectory() as tmpdirname:
                temp_file = os.path.join(tmpdirname, "temp.patch")
                with open(temp_file, "w") as f:
                    f.write(patch.patch)

                subprocess.run(
                    ["git", "apply", "--3way", temp_file],
                    check=True,
                    cwd=self.git_repo_dir,
                )
                subprocess.run(
                    [
                        "git",
                        "-c",
                        f"user.name={author_name}",
                        "-c",
                        f"user.email={author_email}",
                        "commit",
                        "-am",
                        message,
                    ],
                    check=True,
                    cwd=self.git_repo_dir,
                )
Example #18
def PhabricatorMock():
    '''
    Mock phabricator authentication process
    '''
    json_headers = {
        'Content-Type': 'application/json',
    }

    def _response(name):
        path = os.path.join(MOCK_DIR, 'phabricator', '{}.json'.format(name))
        assert os.path.exists(path), 'Missing mock {}'.format(path)
        return open(path).read()

    def _phab_params(request):
        # What a weird way to send parameters
        return json.loads(urllib.parse.parse_qs(request.body)['params'][0])

    def _diff_search(request):
        params = _phab_params(request)
        assert 'constraints' in params
        if 'revisionPHIDs' in params['constraints']:
            # Search from revision
            mock_name = 'search-{}'.format(params['constraints']['revisionPHIDs'][0])
        elif 'phids' in params['constraints']:
            # Search from diffs
            diffs = '-'.join(params['constraints']['phids'])
            mock_name = 'search-{}'.format(diffs)
        else:
            raise Exception('Unsupported diff mock {}'.format(params))
        return (200, json_headers, _response(mock_name))

    def _diff_raw(request):
        params = _phab_params(request)
        assert 'diffID' in params
        return (200, json_headers, _response('raw-{}'.format(params['diffID'])))

    def _edges(request):
        params = _phab_params(request)
        assert 'sourcePHIDs' in params
        return (200, json_headers, _response('edges-{}'.format(params['sourcePHIDs'][0])))

    def _create_artifact(request):
        params = _phab_params(request)
        assert 'buildTargetPHID' in params
        return (200, json_headers, _response('artifact-{}'.format(params['buildTargetPHID'])))

    def _send_message(request):
        params = _phab_params(request)
        assert 'buildTargetPHID' in params
        name = 'message-{}-{}'.format(params['buildTargetPHID'], params['type'])
        if params['unit']:
            name += '-unit'
        if params['lint']:
            name += '-lint'
        return (200, json_headers, _response(name))

    with responses.RequestsMock(assert_all_requests_are_fired=False) as resp:

        resp.add(
            responses.POST,
            'http://phabricator.test/api/user.whoami',
            body=_response('auth'),
            content_type='application/json',
        )

        resp.add_callback(
            responses.POST,
            'http://phabricator.test/api/edge.search',
            callback=_edges,
        )

        resp.add_callback(
            responses.POST,
            'http://phabricator.test/api/differential.diff.search',
            callback=_diff_search,
        )

        resp.add_callback(
            responses.POST,
            'http://phabricator.test/api/differential.getrawdiff',
            callback=_diff_raw,
        )

        resp.add_callback(
            responses.POST,
            'http://phabricator.test/api/harbormaster.createartifact',
            callback=_create_artifact,
        )

        resp.add_callback(
            responses.POST,
            'http://phabricator.test/api/harbormaster.sendmessage',
            callback=_send_message,
        )

        resp.add(
            responses.POST,
            'http://phabricator.test/api/diffusion.repository.search',
            body=_response('repositories'),
            content_type='application/json',
        )

        api = PhabricatorAPI(
            url='http://phabricator.test/api/',
            api_key='deadbeef',
        )
        api.mocks = resp  # used to assert in tests on callbacks
        yield api
Example #19
    def apply_phab(self, hg, phabricator_deployment, diff_id):
        if phabricator_deployment == PHAB_PROD:
            api_key = get_secret("PHABRICATOR_TOKEN")
            url = get_secret("PHABRICATOR_URL")
        else:
            api_key = get_secret("PHABRICATOR_DEV_TOKEN")
            url = get_secret("PHABRICATOR_DEV_URL")

        phabricator_api = PhabricatorAPI(api_key=api_key, url=url)

        # Get the stack of patches
        stack = phabricator_api.load_patches_stack(diff_id)
        assert len(stack) > 0, "No patches to apply"

        # Find the first unknown base revision
        needed_stack = []
        revisions = {}
        for patch in reversed(stack):
            needed_stack.insert(0, patch)

            # Stop as soon as a base revision is available
            if self.has_revision(hg, patch.base_revision):
                logger.info(
                    f"Stopping at diff {patch.id} and revision {patch.base_revision}"
                )
                break

        if not needed_stack:
            logger.info("All the patches are already applied")
            return

        # Load all the diff revisions
        diffs = phabricator_api.search_diffs(diff_phid=[p.phid for p in stack])
        revisions = {
            diff["phid"]:
            phabricator_api.load_revision(rev_phid=diff["revisionPHID"],
                                          attachments={"reviewers": True})
            for diff in diffs
        }

        # Update repo to base revision
        hg_base = needed_stack[0].base_revision
        if not self.has_revision(hg, hg_base):
            logger.warning(
                "Missing base revision {} from Phabricator".format(hg_base))
            hg_base = "tip"

        if hg_base:
            hg.update(rev=hg_base, clean=True)
            logger.info(f"Updated repo to {hg_base}")

            if self.git_repo_dir and hg_base != "tip":
                try:
                    self.git_base = tuple(
                        vcs_map.mercurial_to_git(self.git_repo_dir,
                                                 [hg_base]))[0]
                    subprocess.run(
                        [
                            "git", "checkout", "-b", "analysis_branch",
                            self.git_base
                        ],
                        check=True,
                        cwd=self.git_repo_dir,
                    )
                    logger.info(f"Updated git repo to {self.git_base}")
                except Exception as e:
                    logger.info(
                        f"Updating git repo to Mercurial {hg_base} failed: {e}"
                    )

        def load_user(phid):
            if phid.startswith("PHID-USER"):
                return phabricator_api.load_user(user_phid=phid)
            elif phid.startswith("PHID-PROJ"):
                # TODO: Support group reviewers somehow.
                logger.info(f"Skipping group reviewer {phid}")
            else:
                raise Exception(f"Unsupported reviewer {phid}")

        for patch in needed_stack:
            revision = revisions[patch.phid]

            message = "{}\n\n{}".format(revision["fields"]["title"],
                                        revision["fields"]["summary"])

            author_name = None
            author_email = None

            if patch.commits:
                author_name = patch.commits[0]["author"]["name"]
                author_email = patch.commits[0]["author"]["email"]

            if author_name is None:
                author = load_user(revision["fields"]["authorPHID"])
                author_name = author["fields"]["realName"]
                # XXX: Figure out a way to know the email address of the author.
                author_email = author["fields"]["username"]

            reviewers = list(
                filter(
                    None,
                    (load_user(reviewer["reviewerPHID"]) for reviewer in
                     revision["attachments"]["reviewers"]["reviewers"]),
                ))
            reviewers = set(reviewer["fields"]["username"]
                            for reviewer in reviewers)

            if len(reviewers):
                message = replace_reviewers(message, reviewers)

            logger.info(
                f"Applying {patch.phid} from revision {revision['id']}: {message}"
            )

            hg.import_(
                patches=io.BytesIO(patch.patch.encode("utf-8")),
                message=message.encode("utf-8"),
                user=f"{author_name} <{author_email}>".encode("utf-8"),
            )

            if self.git_repo_dir:
                patch_proc = subprocess.Popen(
                    ["patch", "-p1", "--no-backup-if-mismatch", "--force"],
                    stdin=subprocess.PIPE,
                    cwd=self.git_repo_dir,
                )
                patch_proc.communicate(patch.patch.encode("utf-8"))
                assert patch_proc.returncode == 0, "Failed to apply patch"

                subprocess.run(
                    [
                        "git",
                        "-c",
                        f"user.name={author_name}",
                        "-c",
                        f"user.email={author_email}",
                        "commit",
                        "-am",
                        message,
                    ],
                    check=True,
                    cwd=self.git_repo_dir,
                )
Example #20
def mock_phabricator(mock_config):
    """
    Mock phabricator authentication process
    """
    def _response(name):
        path = os.path.join(MOCK_DIR, "phabricator_{}.json".format(name))
        assert os.path.exists(path)
        return open(path).read()

    responses.add(
        responses.POST,
        "http://phabricator.test/api/user.whoami",
        body=_response("auth"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/differential.diff.search",
        body=_response("diff_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/differential.revision.search",
        body=_response("revision_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/differential.query",
        body=_response("diff_query"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/differential.getrawdiff",
        body=_response("diff_raw"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/differential.createinline",
        body=_response("createinline"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/edge.search",
        body=_response("edge_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/transaction.search",
        body=_response("transaction_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/harbormaster.target.search",
        body=_response("target_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/harbormaster.build.search",
        body=_response("build_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/harbormaster.buildable.search",
        body=_response("buildable_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/harbormaster.sendmessage",
        body=_response("send_message"),
        content_type="application/json",
    )

    yield PhabricatorAPI(url="http://phabricator.test/api/",
                         api_key="deadbeef")
Example #21
def mock_phabricator(mock_config):
    """
    Mock phabricator authentication process
    """
    def _response(name):
        path = os.path.join(MOCK_DIR, "phabricator_{}.json".format(name))
        assert os.path.exists(path)
        return open(path).read()

    def diff_search(request):
        payload = dict(urllib.parse.parse_qsl(request.body))
        assert "params" in payload
        params = json.loads(payload["params"])

        name = ["diff_search"]
        for values in params.get("constraints", {}).values():
            name += values

        content = _response("_".join(name))

        return (200, {"Content-Type": "application/json"}, content)

    responses.add(
        responses.POST,
        "http://phabricator.test/api/user.whoami",
        body=_response("auth"),
        content_type="application/json",
    )

    responses.add_callback(
        responses.POST,
        "http://phabricator.test/api/differential.diff.search",
        callback=diff_search,
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/differential.revision.search",
        body=_response("revision_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/differential.query",
        body=_response("diff_query"),
        content_type="application/json",
    )

    with open(os.path.join(MOCK_DIR, "phabricator_patch.diff")) as f:
        test_patch = f.read()
    responses.add(
        responses.POST,
        "http://phabricator.test/api/differential.getrawdiff",
        body=json.dumps({
            "error_code": None,
            "error_info": None,
            "result": test_patch
        }),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/differential.createinline",
        body=_response("createinline"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/edge.search",
        body=_response("edge_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/transaction.search",
        body=_response("transaction_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/harbormaster.target.search",
        body=_response("target_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/harbormaster.build.search",
        body=_response("build_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/harbormaster.buildable.search",
        body=_response("buildable_search"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/harbormaster.sendmessage",
        body=_response("send_message"),
        content_type="application/json",
    )

    responses.add(
        responses.POST,
        "http://phabricator.test/api/diffusion.repository.search",
        body=_response("repository_search"),
        content_type="application/json",
    )

    yield PhabricatorAPI(url="http://phabricator.test/api/",
                         api_key="deadbeef")
Example #22
def main(
    taskcluster_secret,
    taskcluster_client_id,
    taskcluster_access_token,
):

    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            'APP_CHANNEL',
            'REPORTERS',
            'PHABRICATOR',
            'ALLOWED_PATHS',
        ),
        existing={
            'APP_CHANNEL': 'development',
            'REPORTERS': [],
            'PUBLICATION': 'IN_PATCH',
            'ZERO_COVERAGE_ENABLED': True,
            'ALLOWED_PATHS': [
                '*',
            ],
        },
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )

    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
        timestamp=True,
    )

    # Setup settings before stats
    phabricator = secrets['PHABRICATOR']
    settings.setup(
        secrets['APP_CHANNEL'],
        secrets['PUBLICATION'],
        secrets['ALLOWED_PATHS'],
    )
    # Setup statistics
    datadog_api_key = secrets.get('DATADOG_API_KEY')
    if datadog_api_key:
        stats.auth(datadog_api_key)

    # Load reporters
    reporters = get_reporters(
        secrets['REPORTERS'],
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load index service
    index_service = get_service(
        'index',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load queue service
    queue_service = get_service(
        'queue',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load Phabricator API
    phabricator_reporting_enabled = 'phabricator' in reporters
    phabricator_api = PhabricatorAPI(phabricator['api_key'],
                                     phabricator['url'])
    if phabricator_reporting_enabled:
        reporters['phabricator'].setup_api(phabricator_api)

    # Load unique revision
    revision = Revision(
        phabricator_api,
        try_task=queue_service.task(settings.try_task_id),

        # Update build status only when phabricator reporting is enabled
        update_build=phabricator_reporting_enabled,
    )

    # Run workflow according to source
    w = Workflow(reporters, index_service, queue_service, phabricator_api,
                 secrets['ZERO_COVERAGE_ENABLED'])
    try:
        w.run(revision)
    except Exception as e:
        # Log errors to papertrail
        logger.error(
            'Static analysis failure',
            revision=revision,
            error=e,
        )

        # Index analysis state
        extras = {}
        if isinstance(e, AnalysisException):
            extras['error_code'] = e.code
            extras['error_message'] = str(e)
        w.index(revision, state='error', **extras)

        # Update Harbormaster status
        revision.update_status(state=BuildState.Fail)

        # Then raise to mark task as erroneous
        raise