Example #1
File: push.py Project: jmaher/mozci
    def send_emails(self, total, stats, error_line):

        today = datetime.datetime.strftime(datetime.datetime.now(), "%Y-%m-%d")

        stats = "\n".join([f"- {stat}" for stat in stats])

        environment = self.option("environment")
        notify_email(
            emails=config.get("emails", {}).get("monitoring"),
            subject=f"{environment} classify-eval report generated the {today}",
            content=EMAIL_CLASSIFY_EVAL.format(
                today=today,
                total=total,
                error_line=f"**{error_line}**" if error_line else "",
                stats=stats,
            ),
        )
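A minimal, self-contained sketch of the formatting this method performs before handing the content to notify_email; the stat values below are made-up examples, not taken from the project.

import datetime

# Same date and stats formatting as in send_emails above, with hypothetical values.
today = datetime.datetime.strftime(datetime.datetime.now(), "%Y-%m-%d")
stats = "\n".join([f"- {stat}" for stat in ["accuracy: 0.92", "precision: 0.88"]])
print(today)   # e.g. "2024-01-31"
print(stats)   # "- accuracy: 0.92\n- precision: 0.88"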
Example #2
    def _trigger_action(self, action, payload):
        tc_firefox_ci_credentials = config.get("taskcluster_firefox_ci", {})
        client_id = tc_firefox_ci_credentials.get("client_id")
        access_token = tc_firefox_ci_credentials.get("access_token")
        assert (
            client_id and access_token
        ), "Missing Taskcluster Firefox CI credentials in mozci config secret"

        options = taskcluster.optionsFromEnvironment()
        options["rootUrl"] = PRODUCTION_TASKCLUSTER_ROOT_URL
        options["credentials"] = {
            "clientId": client_id,
            "accessToken": access_token,
        }
        hooks = taskcluster.Hooks(options)

        result = hooks.triggerHook(action["hookGroupId"], action["hookId"], payload)
        return result["status"]["taskId"]
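_trigger_action only reads the "hookGroupId" and "hookId" keys of the action it receives, so a call could look like the sketch below; the hook identifiers and payload are hypothetical placeholders (in practice they come from a push's taskgraph actions artifact, as the next example shows).

# Hypothetical action and payload; only the two hook keys are read by _trigger_action.
action = {
    "hookGroupId": "project-gecko",        # placeholder
    "hookId": "in-tree-action-1-generic",  # placeholder
}
payload = {}  # placeholder hook payload

# From within the command class this would be:
#     task_id = self._trigger_action(action, payload)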
Example #3
    def handle(self) -> None:
        branch = self.option("branch")
        environment = self.option("environment")
        matrix_room = config.get("matrix-room-id")
        current_task_id = os.environ.get("TASK_ID")

        try:
            nb_pushes = int(self.option("nb-pushes"))
        except ValueError:
            self.line("<error>Provided --nb-pushes should be an int.</error>")
            exit(1)

        self.line("<comment>Loading pushes...</comment>")
        self.pushes = make_push_objects(nb=nb_pushes, branch=branch)
        nb_pushes = len(self.pushes)

        to_notify: Dict[str, Dict[str, Any]] = {}
        for index, push in enumerate(self.pushes, start=1):
            self.line(
                f"<comment>Processing push {index}/{nb_pushes}: {push.push_uuid}</comment>"
            )
            backfill_tasks = []

            try:
                indexed_tasks = list_indexed_tasks(
                    f"gecko.v2.{push.branch}.revision.{push.rev}.taskgraph.actions"
                )
            except requests.exceptions.HTTPError as e:
                self.line(
                    f"<error>Couldn't fetch indexed tasks on push {push.push_uuid}: {e}</error>"
                )
                continue

            for indexed_task in indexed_tasks:
                task_id = indexed_task["taskId"]
                try:
                    children_tasks = list_dependent_tasks(task_id)
                except requests.exceptions.HTTPError as e:
                    self.line(
                        f"<error>Couldn't fetch dependent tasks of indexed task {task_id} on push {push.push_uuid}: {e}</error>"
                    )
                    continue

                for child_task in children_tasks:
                    task_section = child_task.get("task", {})
                    task_action = task_section.get("tags", {}).get("action", "")
                    # We are looking for the Treeherder symbol because Sheriffs are
                    # only interested in backfill-tasks holding the '-bk' suffix in TH
                    th_symbol = (
                        task_section.get("extra", {})
                        .get("treeherder", {})
                        .get("symbol", "")
                    )
                    status = child_task.get("status", {})
                    if task_action == "backfill-task" and th_symbol.endswith("-bk"):
                        assert status.get(
                            "taskId"
                        ), "Missing taskId attribute in backfill task status"
                        label = task_section.get("tags", {}).get(
                            "label"
                        ) or task_section.get("metadata", {}).get("name")
                        assert (
                            label
                        ), "Missing label attribute in backfill task tags or name attribute in backfill task metadata"
                        assert status.get(
                            "state"
                        ), "Missing state attribute in backfill task status"
                        backfill_tasks.append(
                            BackfillTask(
                                status["taskId"], label, th_symbol, status["state"]
                            )
                        )
                    else:
                        logger.debug(
                            f"Skipping non-backfill task {status.get('taskId')}"
                        )

            def group_key(task):
                return task.th_symbol

            # Sorting backfill tasks by their Treeherder symbol
            backfill_tasks = sorted(backfill_tasks, key=group_key)
            # Grouping ordered backfill tasks by their associated Treeherder symbol
            for th_symbol, tasks_iter in groupby(backfill_tasks, group_key):
                if th_symbol not in to_notify:
                    to_notify[th_symbol] = {
                        "newest_push": None,
                        "backfill_tasks": set(),
                    }

                # make_push_objects returns the latest pushes in chronological order from oldest to newest
                # We only need to store the newest Push that appeared for this Treeherder symbol
                to_notify[th_symbol]["newest_push"] = push
                # Storing all backfill tasks for this symbol across multiple pushes
                to_notify[th_symbol]["backfill_tasks"].update(tasks_iter)

        for th_symbol, data in to_notify.items():
            all_backfill_tasks = data["backfill_tasks"]
            # Checking that all backfill tasks for this symbol are in a "final" state
            if not all(task.state in TASK_FINAL_STATES for task in all_backfill_tasks):
                logger.debug(
                    f"Not all backfill tasks for the Treeherder symbol {th_symbol} are in a final state, not notifying now."
                )
                continue

            newest_push = data["newest_push"]
            index_path = f"project.mozci.check-backfill.{environment}.{newest_push.branch}.{newest_push.rev}.{th_symbol}"
            try:
                find_task_id(index_path, root_url=COMMUNITY_TASKCLUSTER_ROOT_URL)
            except requests.exceptions.HTTPError:
                pass
            else:
                logger.debug(
                    f"A notification was already sent for the backfill tasks associated to the Treeherder symbol {th_symbol}."
                )
                continue

            try:
                parents = list(newest_push._iterate_parents(max_depth=20))
            except Exception as e:
                logger.debug(
                    f"Failed to load the last twenty parent pushes for push {newest_push.push_uuid}, because: {e}."
                )
                parents = None

            cleaned_label = re.sub(
                r"(-e10s|-1proc)?(-\d+)?$", "", all_backfill_tasks.pop().label
            )
            notification = NOTIFICATION_BACKFILL_GROUP_COMPLETED.format(
                th_symbol=th_symbol,
                push=newest_push,
                tochange=f"&tochange={newest_push.child.rev}",
                fromchange=f"&fromchange={parents[-1].rev}" if parents else "",
                searchstr=f"&searchStr={cleaned_label}",
            )

            if not matrix_room:
                self.line(
                    f"<comment>A notification should be sent for the backfill tasks associated to the Treeherder symbol {th_symbol} but no matrix room was provided in the secret.</comment>"
                )
                logger.debug(f"The notification: {notification}")
                continue

            # Sending a notification to the Matrix channel defined in secret
            notify_matrix(
                room=matrix_room,
                body=notification,
            )

            if not current_task_id:
                self.line(
                    f"<comment>The current task should be indexed in {index_path} but TASK_ID environment variable isn't set.</comment>"
                )
                continue

            # Populating the index with the current task to prevent sending the notification once again
            index_current_task(
                index_path,
                root_url=COMMUNITY_TASKCLUSTER_ROOT_URL,
            )
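The loop above constructs BackfillTask objects positionally and later reads .label, .th_symbol and .state, so the container presumably has a shape close to this NamedTuple sketch; the field names are inferred from usage here, not copied from mozci.

from typing import NamedTuple

# Assumed shape, inferred from how BackfillTask is built and consumed above.
class BackfillTask(NamedTuple):
    task_id: str    # status["taskId"]
    label: str      # tags label, falling back to the metadata name
    th_symbol: str  # Treeherder symbol, expected to end with "-bk"
    state: str      # status["state"], checked against TASK_FINAL_STATES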
Example #4
from mozci.errors import (  # assumed source module for PushNotFound (the import head is truncated in the original snippet)
    PushNotFound,
)
from mozci.task import (
    GroupResult,
    GroupSummary,
    LabelSummary,
    RunnableSummary,
    Status,
    Task,
    TestTask,
)
from mozci.util.hgmo import HGMO

BASE_INDEX = "gecko.v2.{branch}.revision.{rev}"

MAX_DEPTH = config.get("maxdepth", 20)
"""The maximum number of parents or children to look for previous/next task runs,
when the task did not run on the currently considered push.
"""

FAILURE_CLASSES = ("not classified", "fixed by commit")


class Push:
    """A representation of a single push.

    Args:
        revs (list): List of revisions of commits in the push (top-most is the first element).
        branch (str): Branch to look on (default: autoland).
    """
Example #5
File: push.py Project: jmaher/mozci
    def handle(self) -> None:
        self.branch = self.argument("branch")

        pushes = classify_commands_pushes(
            self.branch,
            self.option("from-date"),
            self.option("to-date"),
            self.option("rev"),
        )

        try:
            medium_conf = float(self.option("medium-confidence"))
        except ValueError:
            self.line(
                "<error>Provided --medium-confidence should be a float.</error>"
            )
            exit(1)
        try:
            high_conf = float(self.option("high-confidence"))
        except ValueError:
            self.line(
                "<error>Provided --high-confidence should be a float.</error>"
            )
            exit(1)

        retrigger_unknown = bool(self.option("retrigger-unknown"))
        output = self.option("output")
        if output and not os.path.isdir(output):
            os.makedirs(output)
            self.line(
                "<comment>Provided --output pointed to a nonexistent directory, which has now been created.</comment>"
            )

        for push in pushes:
            try:
                classification, regressions = push.classify(
                    intermittent_confidence_threshold=medium_conf,
                    real_confidence_threshold=high_conf,
                )
                if retrigger_unknown:
                    for _, tasks in regressions.unknown.items():
                        retrigger(tasks=tasks, repeat_retrigger=1)
                self.line(
                    f"Push associated with the head revision {push.rev} on "
                    f"the branch {self.branch} is classified as {classification.name}"
                )
            except Exception as e:
                self.line(
                    f"<error>Couldn't classify push {push.push_uuid}: {e}.</error>"
                )
                # Print the error stacktrace in red
                self.line(f"<error>{traceback.format_exc()}</error>")
                continue

            if self.option("show-intermittents"):
                self.line("-" * 50)
                self.line(
                    "Printing tasks that should be marked as intermittent failures:"
                )
                for task in regressions.intermittent:
                    self.line(task)
                self.line("-" * 50)

            if output:
                to_save = {
                    "push": {
                        "id": push.push_uuid,
                        "classification": classification.name,
                    },
                    "failures": {
                        "real": {
                            group: [{
                                "task_id": task.id,
                                "label": task.label
                            } for task in failing_tasks]
                            for group, failing_tasks in
                            regressions.real.items()
                        },
                        "intermittent": {
                            group: [{
                                "task_id": task.id,
                                "label": task.label
                            } for task in failing_tasks]
                            for group, failing_tasks in
                            regressions.intermittent.items()
                        },
                        "unknown": {
                            group: [{
                                "task_id": task.id,
                                "label": task.label
                            } for task in failing_tasks]
                            for group, failing_tasks in
                            regressions.unknown.items()
                        },
                    },
                }

                filename = f"{output}/classify_output_{self.branch}_{push.rev}.json"
                with open(filename, "w") as file:
                    json.dump(to_save, file, indent=2)

                self.line(
                    f"Classification and regression details for push {push.push_uuid} were saved to the JSON file {filename}"
                )

            # Send a notification when some emails are declared in the config
            emails = config.get("emails", {}).get("classifications")
            matrix_room = config.get("matrix-room-id", None)
            if emails or matrix_room:
                # Load previous classification from taskcluster
                try:
                    previous = push.get_existing_classification(
                        self.option("environment")
                    )
                except SourcesNotFound:
                    # We still want to send a notification if the current one is bad
                    previous = None

                self.send_notifications(
                    emails, matrix_room, push, previous, classification, regressions
                )
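A rough programmatic equivalent of the classification step wrapped by this command; the revision is a placeholder and the two thresholds stand in for the --medium-confidence and --high-confidence options.

from mozci.push import Push

# Hypothetical standalone usage; revision and thresholds are illustrative only.
push = Push(["0123456789abcdef"], branch="autoland")
classification, regressions = push.classify(
    intermittent_confidence_threshold=0.5,
    real_confidence_threshold=0.9,
)
print(classification.name)
for group, tasks in regressions.intermittent.items():
    print(group, [task.label for task in tasks])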