Example #1
def bisect_inbound(runner, logger):
    fetch_config = runner.fetch_config
    options = runner.options
    if not fetch_config.is_inbound():
        raise MozRegressionError('Unable to bisect inbound for `%s`'
                                 % fetch_config.app_name)
    if not options.last_good_revision or not options.first_bad_revision:
        raise MozRegressionError("If bisecting inbound, both --good-rev"
                                 " and --bad-rev must be set")
    return runner.bisect_inbound(options.last_good_revision,
                                 options.first_bad_revision)
Example #2
    def pushes(self, **kwargs):
        """
        Returns a sorted lists of Push objects. The list can not be empty.

        Basically issue a raw request to the server.
        """
        base_url = '%s/json-pushes?' % self.repo_url
        url = base_url + '&'.join("%s=%s" % kv for kv in six.iteritems(kwargs))
        LOG.debug("Using url: %s" % url)

        response = retry_get(url)
        if response.status_code == 404:
            raise MozRegressionError(
                "The url %r returned a 404 error. Please check the"
                " validity of the url." % url)
        response.raise_for_status()
        data = response.json()
        if not data:
            raise EmptyPushlogError(
                "The url %r contains no pushlog. Maybe use another range ?" %
                url)
        pushlog = []
        for key in sorted(data):
            pushlog.append(Push(key, data[key]))
        return pushlog
Example #3
    def find_inbound_changesets(self, days_required=4):
        self._logger.info("... attempting to bisect inbound builds (starting"
                          " from %d days ago, to make sure no inbound revision"
                          " is missed)" % days_required)
        infos = {}
        days = days_required - 1
        too_many_attempts = False
        first_date = min(self.good_date, self.bad_date)
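        # widen the search window one day at a time until a build with
        # changeset information is found, giving up after a few extra days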
        while 'changeset' not in infos:
            days += 1
            if days >= days_required + 3:
                too_many_attempts = True
                break
            prev_date = first_date - datetime.timedelta(days=days)
            build_data = self.build_data
            infos = build_data.get_build_infos_for_date(prev_date)
        if days > days_required and not too_many_attempts:
            self._logger.info("At least one build folder was invalid, we have"
                              " to start from %d days ago." % days)

        if not self.find_fix:
            good_rev = infos.get('changeset')
            bad_rev = self.bad_revision
        else:
            good_rev = self.good_revision
            bad_rev = infos.get('changeset')
        if bad_rev is None or good_rev is None:
            # old nightly builds do not have the changeset information,
            # so we can't go on inbound. Anyway, these are probably too
            # old and won't even exist on inbound.
            raise MozRegressionError(
                "Not enough changeset information to produce initial inbound"
                " regression range. Builds are probably too old.")

        return good_rev, bad_rev
Example #4
    def init_worker(self, fetch_config, options):
        """
        Create and initialize the worker.

        Should be subclassed to configure the worker, and should return the
        worker method that should start the work.
        """
        self.options = options

        # global preferences
        global_prefs = get_prefs()
        self.global_prefs = global_prefs
        # apply the global prefs now
        apply_prefs(global_prefs)

        fetch_config.set_base_url(global_prefs["archive_base_url"])

        download_dir = global_prefs["persist"]
        if not download_dir:
            download_dir = self.mainwindow.persist
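        # persist_size_limit is expressed in GiB; convert it to bytes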
        persist_limit = PersistLimit(abs(global_prefs["persist_size_limit"]) * 1073741824)
        self.download_manager = GuiBuildDownloadManager(download_dir, persist_limit)
        self.test_runner = GuiTestRunner()
        self.thread = QThread()

        # options for the app launcher
        launcher_kwargs = {}
        for name in ("profile", "preferences"):
            if name in options:
                value = options[name]
                if value:
                    launcher_kwargs[name] = value

        # add add-ons paths to the app launcher
        launcher_kwargs["addons"] = options["addons"]
        self.test_runner.launcher_kwargs = launcher_kwargs

        launcher_kwargs["cmdargs"] = []

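        # reusing (or cloning) an existing profile can trip the browser's
        # downgrade protection when running older builds, so allow downgrades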
        if options["profile_persistence"] in ("clone-first", "reuse") or options["profile"]:
            launcher_kwargs["cmdargs"] += ["--allow-downgrade"]

        # Thunderbird will fail to start if passed a URL arg
        if options.get("url") and fetch_config.app_name != "thunderbird":
            launcher_kwargs["cmdargs"] += [options["url"]]

        # Lang only works for firefox-l10n
        if options.get("lang"):
            if options["application"] == "firefox-l10n":
                fetch_config.set_lang(options["lang"])
            else:
                raise MozRegressionError("Invalid lang argument")

        self.worker = self.worker_class(fetch_config, self.test_runner, self.download_manager)
        # Move self.worker into the thread. This will allow the worker's
        # slots (connected after the move) to be called automatically
        # in the thread.
        self.worker.moveToThread(self.thread)
        self.worker_created.emit(self.worker)
Example #5
    def handle_merge(self):
        # let's check if we are facing a merge, and in that case,
        # continue the bisection from the merged branch.
        result = None

        LOG.debug("Starting merge handling...")
        # we have to check the commit of the most recent push
        most_recent_push = self.build_range[1]
        jp = JsonPushes(most_recent_push.repo_name)
        push = jp.push(most_recent_push.changeset, full='1')
        msg = push.changeset['desc']
        LOG.debug("Found commit message:\n%s\n" % msg)
        branch = find_branch_in_merge_commit(msg)
        if not (branch and len(push.changesets) >= 2):
            return
        try:
            # so, this is a merge. We can find the oldest and youngest
            # changesets, and the branch where the merge comes from.
            oldest = push.changesets[0]['node']
            # exclude the merge commit
            youngest = push.changesets[-2]['node']
            LOG.debug("This is a merge from %s" % branch)

            # we can't use the youngest changeset directly because we
            # don't yet know if it is good.
            #
            # PUSH1    PUSH2
            # [1 2] [3 4 5 6 7]
            #    G    MERGE  B
            #
            # so first, grab it. This needs to be done on the right branch.
            jp2 = JsonPushes(branch)
            raw = [int(p.push_id) for p in
                   jp2.pushes_within_changes(oldest, youngest)]
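            # go back a couple of push IDs so the range starts at the last
            # known good push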
            data = jp2.pushes(
                startID=str(min(raw) - 2),
                endID=str(max(raw)),
            )

            oldest = data[0].changesets[0]
            youngest = data[-1].changesets[-1]

            # we are ready to bisect further
            LOG.info("************* Switching to %s" % branch)
            gr, br = self._reverse_if_find_fix(oldest, youngest)
            result = (branch, gr, br)
        except MozRegressionError:
            LOG.debug("Got exception", exc_info=True)
            raise MozRegressionError(
                "Unable to exploit the merge commit. Origin branch is {}, and"
                " the commit message for {} was:\n{}".format(
                    most_recent_push.repo_name,
                    most_recent_push.short_changeset,
                    msg
                )
            )
        LOG.debug('End merge handling')
        return result
Example #6
def get_defaults(conf_path):
    """
    Get custom defaults from the configuration file given as argument.
    """
    defaults = dict(DEFAULTS)
    try:
        config = ConfigObj(conf_path)
    except ParseError as exc:
        raise MozRegressionError(
            "Error while reading the config file %s:\n  %s" % (conf_path, exc))
    defaults.update(config)
    return defaults
Example #7
def bisect_nightlies(runner, logger):
    default_bad_date = str(datetime.date.today())
    default_good_date = "2009-01-01"
    fetch_config = runner.fetch_config
    options = runner.options
    # TODO: currently every fetch_config is nightly aware. Should we test
    # for this to be sure here?
    fetch_config.set_nightly_repo(options.repo)
    if not options.bad_release and not options.bad_date:
        options.bad_date = default_bad_date
        logger.info("No 'bad' date specified, using %s" % options.bad_date)
    elif options.bad_release and options.bad_date:
        raise MozRegressionError("Options '--bad_release' and '--bad_date'"
                                 " are incompatible.")
    elif options.bad_release:
        options.bad_date = date_of_release(options.bad_release)
        logger.info("Using 'bad' date %s for release %s"
                    % (options.bad_date, options.bad_release))
    if not options.good_release and not options.good_date:
        options.good_date = default_good_date
        logger.info("No 'good' date specified, using %s"
                    % options.good_date)
    elif options.good_release and options.good_date:
        raise MozRegressionError("Options '--good_release' and '--good_date'"
                                 " are incompatible.")
    elif options.good_release:
        options.good_date = date_of_release(options.good_release)
        logger.info("Using 'good' date %s for release %s"
                    % (options.good_date, options.good_release))

    good_date = parse_date(options.good_date)
    bad_date = parse_date(options.bad_date)
    if good_date > bad_date and not options.find_fix:
        raise MozRegressionError(("Good date %s is later than bad date %s."
                                  " Maybe you wanted to use the --find-fix"
                                  " flag ?") % (good_date, bad_date))
    elif good_date < bad_date and options.find_fix:
        raise MozRegressionError(("Bad date %s is later than good date %s."
                                  " You should not use the --find-fix flag"
                                  " in this case...") % (bad_date, good_date))

    return runner.bisect_nightlies(good_date, bad_date)
Example #8
def get_config(conf_path):
    """
    Get custom defaults from the configuration file given as argument.
    """
    config = dict(DEFAULTS)
    try:
        config_obj = ConfigObj(conf_path)
    except ParseError as exc:
        raise MozRegressionError("Error while reading the config file %s:\n  %s" % (conf_path, exc))
    config.update(config_obj)

    return config
Example #9
    def _request(self, url):
        response = retry_get(url)
        if response.status_code == 404:
            raise MozRegressionError(
                "The url %r returned a 404 error. Please check the"
                " validity of the url." % url)
        response.raise_for_status()
        pushlog = response.json()
        if not pushlog:
            raise EmptyPushlogError(
                "The url %r contains no pushlog. Maybe use another range?" %
                url)
        return pushlog
Example #10
    def get_url(self, branch_name_or_alias):
        try:
            return self._branches[self.get_name(branch_name_or_alias)]
        except KeyError:
            raise MozRegressionError("No such branch '%s'." %
                                     branch_name_or_alias)
Example #11
    def validate(self):
        """
        Validate the options, define the `action` and `fetch_config` that
        should be used to run the application.
        """
        options = self.options

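        # remember whether --bits was given explicitly before falling back
        # to the host's bitness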
        user_defined_bits = options.bits is not None
        options.bits = parse_bits(options.bits or mozinfo.bits)
        if options.arch is not None:
            if options.app != "gve":
                self.logger.warning("--arch ignored for non-GVE app.")
                options.arch = None

        fetch_config = create_config(options.app, mozinfo.os, options.bits,
                                     mozinfo.processor, options.arch)
        if options.lang:
            if options.app != "firefox-l10n":
                raise MozRegressionError(
                    "--lang is only valid with --app=firefox-l10n")
            fetch_config.set_lang(options.lang)
        elif options.app == "firefox-l10n":
            raise MozRegressionError(
                "app 'firefox-l10n' requires a --lang argument")
        if options.build_type:
            try:
                fetch_config.set_build_type(options.build_type)
            except MozRegressionError as msg:
                self.logger.warning("%s (Defaulting to %r)" %
                                    (msg, fetch_config.build_type))
        self.fetch_config = fetch_config

        fetch_config.set_repo(options.repo)
        fetch_config.set_base_url(options.archive_base_url)

        if (not user_defined_bits and options.bits == 64
                and mozinfo.os == "win"
                and 32 in fetch_config.available_bits()):
            # inform users on windows that we are using 64 bit builds.
            self.logger.info("bits option not specified, using 64-bit builds.")

        if options.bits == 32 and mozinfo.os == "mac":
            self.logger.info("only 64-bit builds available for mac, using "
                             "64-bit builds")

        if fetch_config.is_integration() and fetch_config.tk_needs_auth():
            creds = tc_authenticate(self.logger)
            fetch_config.set_tk_credentials(creds)

        # set the action: either launch a single changeset/date, or bisect
        if options.launch:
            options.launch = self._convert_to_bisect_arg(options.launch)
            self.action = "launch_integration"
            if is_date_or_datetime(
                    options.launch) and fetch_config.should_use_archive():
                self.action = "launch_nightlies"
        else:
            # define good/bad default values if required
            default_good_date, default_bad_date = get_default_date_range(
                fetch_config)
            if options.find_fix:
                default_bad_date, default_good_date = (
                    default_good_date,
                    default_bad_date,
                )
            if not options.bad:
                options.bad = default_bad_date
                self.logger.info("No 'bad' option specified, using %s" %
                                 options.bad)
            else:
                options.bad = self._convert_to_bisect_arg(options.bad)
            if not options.good:
                options.good = default_good_date
                self.logger.info("No 'good' option specified, using %s" %
                                 options.good)
            else:
                options.good = self._convert_to_bisect_arg(options.good)

            self.action = "bisect_integration"
            if is_date_or_datetime(options.good) and is_date_or_datetime(
                    options.bad):
                if not options.find_fix and to_datetime(
                        options.good) > to_datetime(options.bad):
                    raise MozRegressionError(
                        ("Good date %s is later than bad date %s."
                         " Maybe you wanted to use the --find-fix"
                         " flag?") % (options.good, options.bad))
                elif options.find_fix and to_datetime(
                        options.good) < to_datetime(options.bad):
                    raise MozRegressionError(
                        ("Bad date %s is later than good date %s."
                         " You should not use the --find-fix flag"
                         " in this case...") % (options.bad, options.good))
                if fetch_config.should_use_archive():
                    self.action = "bisect_nightlies"
        if (self.action in ("launch_integration", "bisect_integration")
                and not fetch_config.is_integration()):
            raise MozRegressionError("Unable to bisect integration for `%s`" %
                                     fetch_config.app_name)
        options.preferences = preferences(options.prefs_files, options.prefs,
                                          self.logger)
        # convert GiB to bytes.
        options.persist_size_limit = int(
            abs(float(options.persist_size_limit)) * 1073741824)
Example #12
    def validate(self):
        """
        Validate the options, define the `action` and `fetch_config` that
        should be used to run the application.
        """
        options = self.options

        user_defined_bits = options.bits is not None
        options.bits = parse_bits(options.bits or mozinfo.bits)
        fetch_config = create_config(options.app, mozinfo.os, options.bits,
                                     mozinfo.processor)
        if options.build_type:
            try:
                fetch_config.set_build_type(options.build_type)
            except MozRegressionError as msg:
                self.logger.warning(
                    "%s (Defaulting to %r)" % (msg, fetch_config.build_type)
                )
        self.fetch_config = fetch_config

        fetch_config.set_repo(options.repo)
        if fetch_config.is_nightly():
            fetch_config.set_base_url(options.archive_base_url)

        if not user_defined_bits and \
                options.bits == 64 and \
                mozinfo.os == 'win' and \
                32 in fetch_config.available_bits():
            # inform users on windows that we are using 64 bit builds.
            self.logger.info("bits option not specified, using 64-bit builds.")

        if options.bits == 32 and mozinfo.os == 'mac':
            self.logger.info("only 64-bit builds available for mac, using "
                             "64-bit builds")

        if fetch_config.is_inbound() and fetch_config.tk_needs_auth():
            creds = tc_authenticate(self.logger)
            fetch_config.set_tk_credentials(creds)

        # set the action: either launch a single changeset/date, or bisect
        if options.launch:
            options.launch = self._convert_to_bisect_arg(options.launch)
            self.action = "launch_inbound"
            if is_date_or_datetime(options.launch) and \
                    not fetch_config.should_use_taskcluster():
                self.action = "launch_nightlies"
        else:
            # define good/bad default values if required
            default_good_date, default_bad_date = \
                get_default_date_range(fetch_config)
            if options.find_fix:
                default_bad_date, default_good_date = \
                    default_good_date, default_bad_date
            if not options.bad:
                options.bad = default_bad_date
                self.logger.info("No 'bad' option specified, using %s"
                                 % options.bad)
            else:
                options.bad = self._convert_to_bisect_arg(options.bad)
            if not options.good:
                options.good = default_good_date
                self.logger.info("No 'good' option specified, using %s"
                                 % options.good)
            else:
                options.good = self._convert_to_bisect_arg(options.good)

            self.action = "bisect_inbounds"
            if is_date_or_datetime(options.good) and \
                    is_date_or_datetime(options.bad):
                if not options.find_fix and \
                        to_datetime(options.good) > to_datetime(options.bad):
                    raise MozRegressionError(
                        ("Good date %s is later than bad date %s."
                         " Maybe you wanted to use the --find-fix"
                         " flag?") % (options.good, options.bad))
                elif options.find_fix and \
                        to_datetime(options.good) < to_datetime(options.bad):
                    raise MozRegressionError(
                        ("Bad date %s is later than good date %s."
                         " You should not use the --find-fix flag"
                         " in this case...") % (options.bad, options.good))
                if not fetch_config.should_use_taskcluster():
                    self.action = "bisect_nightlies"
        if self.action in ('launch_inbound', 'bisect_inbounds')\
                and not fetch_config.is_inbound():
            raise MozRegressionError('Unable to bisect inbound for `%s`'
                                     % fetch_config.app_name)
        options.preferences = preferences(options.prefs_files, options.prefs, self.logger)
        # convert GiB to bytes.
        options.persist_size_limit = \
            int(abs(float(options.persist_size_limit)) * 1073741824)
Example #13
    def handle_merge(self):
        # let's check if we are facing a merge, and in that case,
        # continue the bisection from the merged branch.
        result = None

        LOG.debug("Starting merge handling...")
        # we have to check the commit of the most recent push
        most_recent_push = self.build_range[1]
        jp = JsonPushes(most_recent_push.repo_name)
        push = jp.push(most_recent_push.changeset, full='1')
        msg = push.changeset['desc']
        LOG.debug("Found commit message:\n%s\n" % msg)
        branch = find_branch_in_merge_commit(msg, most_recent_push.repo_name)
        if not (branch and len(push.changesets) >= 2):
            # We did not find a branch; let's check the integration branches if we are bisecting m-c
            LOG.debug(
                "Did not find a branch, checking all integration branches")
            if get_name(most_recent_push.repo_name) == 'mozilla-central' and \
               len(push.changesets) >= 2:
                branch = self._choose_integration_branch(
                    most_recent_push.changeset)
                oldest = push.changesets[0]['node']
                youngest = push.changesets[-1]['node']
                LOG.info("************* Switching to %s by"
                         " process of elimination (no branch detected in"
                         " commit message)" % branch)
            else:
                return
        else:
            # So, this is a merge. See how many changesets are in it;
            # if it is just one, we have our answer.
            if len(push.changesets) == 2:
                LOG.info("Merge commit has only two revisions (one of which "
                         "is the merge): we are done")
                return

            # Otherwise, we can find the oldest and youngest
            # changesets, and the branch where the merge comes from.
            oldest = push.changesets[0]['node']
            # exclude the merge commit
            youngest = push.changesets[-2]['node']
            LOG.info("************* Switching to %s" % branch)

        # we can't use the oldest changeset directly because we
        # don't yet know if it is good.
        #
        # PUSH1    PUSH2
        # [1 2] [3 4 5 6 7]
        #    G    MERGE  B
        #
        # so first grab the previous push to get the last known good
        # changeset. This needs to be done on the right branch.
        try:
            jp2 = JsonPushes(branch)
            raw = [
                int(p.push_id)
                for p in jp2.pushes_within_changes(oldest, youngest)
            ]
            data = jp2.pushes(
                startID=str(min(raw) - 2),
                endID=str(max(raw)),
            )

            older = data[0].changeset
            youngest = data[-1].changeset

            # we are ready to bisect further
            gr, br = self._reverse_if_find_fix(older, youngest)
            result = (branch, gr, br)
        except MozRegressionError:
            LOG.debug("Got exception", exc_info=True)
            raise MozRegressionError(
                "Unable to exploit the merge commit. Origin branch is {}, and"
                " the commit message for {} was:\n{}".format(
                    most_recent_push.repo_name,
                    most_recent_push.short_changeset, msg))
        LOG.debug('End merge handling')
        return result
Example #14
class InboundHandler(BisectorHandler):
    create_range = staticmethod(range_for_inbounds)

    def _print_progress(self, new_data):
        LOG.info("Narrowed inbound regression window from [%s, %s]"
                 " (%d builds) to [%s, %s] (%d builds)"
                 " (~%d steps left)" %
                 (self.build_range[0].short_changeset,
                  self.build_range[-1].short_changeset, len(self.build_range),
                  new_data[0].short_changeset, new_data[-1].short_changeset,
                  len(new_data), compute_steps_left(len(new_data))))

    def user_exit(self, mid):
        words = self._reverse_if_find_fix('Newest', 'Oldest')
        LOG.info('%s known good inbound revision: %s' %
                 (words[0], self.good_revision))
        LOG.info('%s known bad inbound revision: %s' %
                 (words[1], self.bad_revision))

    def _choose_integration_branch(self, changeset):
        """
        Tries to determine which integration branch the given changeset
        originated from by checking the date the changeset first showed up
        in each repo. The repo with the earliest date is chosen.
        """
        landings = {}
        for k in ("autoland", "mozilla-inbound"):
            jp = JsonPushes(k)

            try:
                push = jp.push(changeset, full='1')
                landings[k] = push.timestamp
            except EmptyPushlogError:
                LOG.debug("Didn't find %s in %s" % (changeset, k))

        repo = min(landings, key=landings.get)
        LOG.debug("Repo '%s' seems to have the earliest push" % repo)
        return repo

    def handle_merge(self):
        # let's check if we are facing a merge, and in that case,
        # continue the bisection from the merged branch.
        result = None

        LOG.debug("Starting merge handling...")
        # we have to check the commit of the most recent push
        most_recent_push = self.build_range[1]
        jp = JsonPushes(most_recent_push.repo_name)
        push = jp.push(most_recent_push.changeset, full='1')
        msg = push.changeset['desc']
        LOG.debug("Found commit message:\n%s\n" % msg)
        branch = find_branch_in_merge_commit(msg, most_recent_push.repo_name)
        if not (branch and len(push.changesets) >= 2):
            # We did not find a branch; let's check the integration branches if we are bisecting m-c
            LOG.debug(
                "Did not find a branch, checking all integration branches")
            if get_name(most_recent_push.repo_name) == 'mozilla-central' and \
               len(push.changesets) >= 2:
                branch = self._choose_integration_branch(
                    most_recent_push.changeset)
                jp2 = JsonPushes(branch)
                try:
                    data = jp2.pushes_within_changes(
                        push.changesets[0]['node'],
                        push.changesets[-1]['node'])
                except MozRegressionError as exc:
                    LOG.error(
                        "Failed to find changes in branch '%s' (error: %s)" %
                        (branch, exc))
                    raise
                LOG.info("************* Switching to %s by"
                         " process of elimination (no branch detected in"
                         " commit message)" % branch)
                gr, br = self._reverse_if_find_fix(data[0].changeset,
                                                   data[-1].changeset)
                return (branch, gr, br)
            else:
                return
        try:
            # So, this is a merge. See how many changesets are in it;
            # if it is just one, we have our answer.
            if len(push.changesets) == 2:
                LOG.info("Merge commit has only two revisions (one of which "
                         "is the merge): we are done")
                return

            # Otherwise, we can find the oldest and youngest
            # changesets, and the branch where the merge comes from.
            oldest = push.changesets[0]['node']
            # exclude the merge commit
            youngest = push.changesets[-2]['node']
            LOG.debug("This is a merge from %s" % branch)

            # we can't use the youngest changeset directly because we
            # don't yet know if it is good.
            #
            # PUSH1    PUSH2
            # [1 2] [3 4 5 6 7]
            #    G    MERGE  B
            #
            # so first, grab it. This needs to be done on the right branch.
            jp2 = JsonPushes(branch)
            raw = [
                int(p.push_id)
                for p in jp2.pushes_within_changes(oldest, youngest)
            ]
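            # go back a couple of push IDs so the range starts at the last
            # known good push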
            data = jp2.pushes(
                startID=str(min(raw) - 2),
                endID=str(max(raw)),
            )

            oldest = data[0].changesets[0]
            youngest = data[-1].changesets[-1]

            # we are ready to bisect further
            LOG.info("************* Switching to %s" % branch)
            gr, br = self._reverse_if_find_fix(oldest, youngest)
            result = (branch, gr, br)
        except MozRegressionError:
            LOG.debug("Got exception", exc_info=True)
            raise MozRegressionError(
                "Unable to exploit the merge commit. Origin branch is {}, and"
                " the commit message for {} was:\n{}".format(
                    most_recent_push.repo_name,
                    most_recent_push.short_changeset, msg))
        LOG.debug('End merge handling')
        return result