def pushlog_within_changes(self, fromchange, tochange, raw=False,
                           verbose=True):
    """
    Returns pushlog json objects (python dicts).

    The result will contain all pushlogs, including the pushlogs for
    fromchange and tochange. These parameters can be dates (date or
    datetime instances) or changesets (str objects).

    This will return at least one pushlog. In case of error it will
    raise a MozRegressionError.
    """
    from_is_date = is_date_or_datetime(fromchange)
    to_is_date = is_date_or_datetime(tochange)

    kwargs = {}
    if not from_is_date:
        # the first changeset is not taken into account in the result.
        # let's add it directly with this request
        chsets = self._request(self.json_pushes_url(changeset=fromchange))
        kwargs['fromchange'] = fromchange
    else:
        chsets = {}
        kwargs['startdate'] = fromchange.strftime('%Y-%m-%d')

    if not to_is_date:
        kwargs['tochange'] = tochange
    else:
        # add one day to take the last day into account
        kwargs['enddate'] = tochange + datetime.timedelta(days=1)

    # now fetch all remaining changesets
    chsets.update(self._request(self.json_pushes_url(**kwargs)))

    ordered = sorted(chsets)

    log = LOG.info if verbose else LOG.debug
    if from_is_date:
        first = chsets[ordered[0]]
        log("Using {} (pushed on {}) for date {}".format(
            first['changesets'][-1],
            datetime.datetime.utcfromtimestamp(first['date']),
            fromchange,
        ))
    if to_is_date:
        last = chsets[ordered[-1]]
        log("Using {} (pushed on {}) for date {}".format(
            last['changesets'][-1],
            datetime.datetime.utcfromtimestamp(last['date']),
            tochange,
        ))

    if raw:
        return chsets
    # sort pushlogs by push id
    return [chsets[k] for k in ordered]

def pushlog_within_changes(self, fromchange, tochange, raw=False,
                           verbose=True):
    """
    Returns pushlog json objects (python dicts).

    The result will contain all pushlogs, including the pushlogs for
    fromchange and tochange. These parameters can be dates (date or
    datetime instances) or changesets (str objects).

    This will return at least one pushlog. In case of error it will
    raise a MozRegressionError.
    """
    from_is_date = is_date_or_datetime(fromchange)
    to_is_date = is_date_or_datetime(tochange)

    kwargs = {}
    if not from_is_date:
        # the first changeset is not taken into account in the result.
        # let's add it directly with this request
        chsets = self._request(self.json_pushes_url(changeset=fromchange))
        kwargs['fromchange'] = fromchange
    else:
        chsets = {}
        kwargs['startdate'] = fromchange.strftime('%Y-%m-%d')

    if not to_is_date:
        kwargs['tochange'] = tochange
    else:
        # add one day to take the last day into account
        kwargs['enddate'] = tochange + datetime.timedelta(days=1)

    # now fetch all remaining changesets
    chsets.update(self._request(self.json_pushes_url(**kwargs)))

    ordered = sorted(chsets)

    log = self.logger.info if verbose else self.logger.debug
    if from_is_date:
        first = chsets[ordered[0]]
        log("Using {} (pushed on {}) for date {}".format(
            first['changesets'][-1],
            datetime.datetime.utcfromtimestamp(first['date']),
            fromchange,
        ))
    if to_is_date:
        last = chsets[ordered[-1]]
        log("Using {} (pushed on {}) for date {}".format(
            last['changesets'][-1],
            datetime.datetime.utcfromtimestamp(last['date']),
            tochange,
        ))

    if raw:
        return chsets
    # sort pushlogs by push id
    return [chsets[k] for k in ordered]

def _check_date(obj):
    if is_date_or_datetime(obj):
        if to_datetime(obj) < time_limit:
            LOG.info("TaskCluster only keeps builds for one year."
                     " Using %s instead of %s." % (time_limit, obj))
            obj = time_limit
    return obj

def pushes_within_changes(self, fromchange, tochange, verbose=True, **kwargs):
    """
    Returns a list of Push objects, including fromchange and tochange.

    This will return at least one Push. In case of error it will raise
    a MozRegressionError.
    """
    from_is_date = is_date_or_datetime(fromchange)
    to_is_date = is_date_or_datetime(tochange)

    kwargs = {}
    if not from_is_date:
        # the first changeset is not taken into account in the result.
        # let's add it directly with this request
        chsets = self.pushes(changeset=fromchange)
        kwargs['fromchange'] = fromchange
    else:
        chsets = []
        kwargs['startdate'] = fromchange.strftime('%Y-%m-%d')

    if not to_is_date:
        kwargs['tochange'] = tochange
    else:
        # add one day to take the last day into account
        kwargs['enddate'] = (
            tochange + datetime.timedelta(days=1)).strftime('%Y-%m-%d')

    # now fetch all remaining changesets
    chsets.extend(self.pushes(**kwargs))

    log = LOG.info if verbose else LOG.debug
    if from_is_date:
        first = chsets[0]
        log("Using {} (pushed on {}) for date {}".format(
            first.changeset, first.utc_date, fromchange))
    if to_is_date:
        last = chsets[-1]
        log("Using {} (pushed on {}) for date {}".format(
            last.changeset, last.utc_date, tochange))

    return chsets

def _check_date(obj):
    if is_date_or_datetime(obj):
        if to_datetime(obj) < time_limit:
            logger.info(
                "TaskCluster only keeps builds for one year."
                " Using %s instead of %s." % (time_limit, obj)
            )
            obj = time_limit
    return obj

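# Every snippet in this listing leans on two small date helpers that the
# surrounding modules import (in mozregression they come from a dates helper
# module). The bodies below are only a minimal sketch of what those helpers
# might look like, inferred from the call sites above; the project's real
# implementation may differ.
import datetime


def is_date_or_datetime(obj):
    # A datetime is also a date, so a single isinstance check would suffice;
    # the tuple just makes the intent explicit.
    return isinstance(obj, (datetime.date, datetime.datetime))


def to_datetime(obj):
    # Normalize a plain date to a datetime at midnight so comparisons such
    # as `to_datetime(obj) < time_limit` work for both input types.
    if isinstance(obj, datetime.datetime):
        return obj
    return datetime.datetime(obj.year, obj.month, obj.day)
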
def init_worker(self, fetch_config, options):
    AbstractBuildRunner.init_worker(self, fetch_config, options)

    self.worker.test_runner.evaluate_started.connect(self.evaluate)
    self.worker.finished.connect(self.bisection_finished)
    self.worker.handle_merge.connect(self.handle_merge)
    self.worker.choose_next_build.connect(self.choose_next_build)
    good, bad = options.pop('good'), options.pop('bad')
    if is_date_or_datetime(good) and is_date_or_datetime(bad) \
            and not fetch_config.should_use_taskcluster():
        handler = NightlyHandler(find_fix=options['find_fix'])
    else:
        handler = InboundHandler(find_fix=options['find_fix'])
    self.worker._bisect_args = (handler, good, bad)
    self.worker.download_in_background = \
        self.global_prefs['background_downloads']
    return self.worker.bisect

def init_worker(self, fetch_config, options):
    AbstractBuildRunner.init_worker(self, fetch_config, options)

    self.worker.test_runner.evaluate_started.connect(self.evaluate)
    self.worker.finished.connect(self.bisection_finished)
    self.worker.handle_merge.connect(self.handle_merge)
    self.worker.choose_next_build.connect(self.choose_next_build)
    good, bad = options.get("good"), options.get("bad")
    if (is_date_or_datetime(good) and is_date_or_datetime(bad)
            and fetch_config.should_use_archive()):
        handler = NightlyHandler(find_fix=options["find_fix"])
    else:
        handler = IntegrationHandler(find_fix=options["find_fix"])
    self.worker._bisect_args = (handler, good, bad)
    self.worker.download_in_background = self.global_prefs[
        "background_downloads"]
    if self.global_prefs["approx_policy"]:
        self.worker.approx_chooser = ApproxPersistChooser(7)
    return self.worker.bisect

def init_worker(self, fetch_config, options):
    AbstractBuildRunner.init_worker(self, fetch_config, options)

    self.download_manager.download_finished.connect(
        self.worker._on_downloaded)
    self.worker.launch_arg = options.pop('launch')
    # evaluate_started will be called if we have an error
    self.test_runner.evaluate_started.connect(self.on_error)
    self.worker.error.connect(self.on_error)
    if is_date_or_datetime(self.worker.launch_arg) and \
            not fetch_config.should_use_taskcluster():
        return self.worker.launch_nightlies
    else:
        return self.worker.launch_inbounds

def push(self, changeset, **kwargs):
    """
    Returns the Push object that matches the given changeset or date.

    A MozRegressionError is thrown if none is found.
    """
    if is_date_or_datetime(changeset):
        try:
            return self.pushes_within_changes(changeset, changeset,
                                              verbose=False)[-1]
        except EmptyPushlogError:
            raise EmptyPushlogError(
                "No pushes available for the date %s on %s."
                % (changeset, self.branch))
    return self.pushes(changeset=changeset, **kwargs)[0]

def push(self, changeset, **kwargs):
    """
    Returns the Push object that matches the given changeset or date.

    A MozRegressionError is thrown if none is found.
    """
    if is_date_or_datetime(changeset):
        try:
            return self.pushes_within_changes(changeset, changeset,
                                              verbose=False)[-1]
        except EmptyPushlogError:
            raise EmptyPushlogError(
                "No pushes available for the date %s on %s."
                % (changeset, self.branch)
            )
    return self.pushes(changeset=changeset, **kwargs)[0]

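# A hedged usage sketch for push()/pushes_within_changes() above. The class
# name JsonPushes, its constructor signature, and the import path are
# assumptions based on the attributes these methods use (self.branch,
# self.pushes); check the actual module before relying on them.
import datetime

from mozregression.json_pushes import JsonPushes  # assumed import path

jp = JsonPushes(branch="mozilla-central")     # assumed constructor
by_date = jp.push(datetime.date(2021, 6, 1))  # newest push of that day
by_changeset = jp.push(by_date.changeset)     # the exact same push
pushes = jp.pushes_within_changes(by_date.changeset, by_changeset.changeset)
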
def find_build_info(self, changeset, fetch_txt_info=True,
                    check_changeset=False):
    """
    Find build info for an inbound build, given a changeset or a date.

    If `check_changeset` is True, the given changeset might be partial
    (< 40 chars) because it will be verified and updated using json pushes.

    Return a :class:`InboundBuildInfo` instance.
    """
    if is_date_or_datetime(changeset):
        changeset = self.jpushes.revision_for_date(changeset)
        check_changeset = False

    # find a task id
    if check_changeset:
        try:
            changeset = self._check_changeset(changeset)
        except MozRegressionError as exc:
            raise BuildInfoNotFound(str(exc))

def validate(self):
    """
    Validate the options, define the `action` and `fetch_config` that
    should be used to run the application.
    """
    options = self.options

    user_defined_bits = options.bits is not None
    options.bits = parse_bits(options.bits or mozinfo.bits)
    fetch_config = create_config(options.app, mozinfo.os, options.bits,
                                 mozinfo.processor)
    if options.build_type:
        try:
            fetch_config.set_build_type(options.build_type)
        except MozRegressionError as msg:
            self.logger.warning(
                "%s (Defaulting to %r)" % (msg, fetch_config.build_type)
            )
    self.fetch_config = fetch_config

    fetch_config.set_repo(options.repo)
    if fetch_config.is_nightly():
        fetch_config.set_base_url(options.archive_base_url)

    if not user_defined_bits and \
            options.bits == 64 and \
            mozinfo.os == 'win' and \
            32 in fetch_config.available_bits():
        # inform users on windows that we are using 64 bit builds.
        self.logger.info("bits option not specified, using 64-bit builds.")

    if options.bits == 32 and mozinfo.os == 'mac':
        self.logger.info("only 64-bit builds available for mac, using "
                         "64-bit builds")

    if fetch_config.is_inbound() and fetch_config.tk_needs_auth():
        creds = tc_authenticate(self.logger)
        fetch_config.set_tk_credentials(creds)

    # set the action: launch a single build or bisect, using a changeset
    # or a date
    if options.launch:
        options.launch = self._convert_to_bisect_arg(options.launch)
        self.action = "launch_inbound"
        if is_date_or_datetime(options.launch) and \
                not fetch_config.should_use_taskcluster():
            self.action = "launch_nightlies"
    else:
        # define good/bad default values if required
        default_good_date, default_bad_date = \
            get_default_date_range(fetch_config)
        if options.find_fix:
            default_bad_date, default_good_date = \
                default_good_date, default_bad_date
        if not options.bad:
            options.bad = default_bad_date
            self.logger.info("No 'bad' option specified, using %s"
                             % options.bad)
        else:
            options.bad = self._convert_to_bisect_arg(options.bad)
        if not options.good:
            options.good = default_good_date
            self.logger.info("No 'good' option specified, using %s"
                             % options.good)
        else:
            options.good = self._convert_to_bisect_arg(options.good)

        self.action = "bisect_inbounds"
        if is_date_or_datetime(options.good) and \
                is_date_or_datetime(options.bad):
            if not options.find_fix and \
                    to_datetime(options.good) > to_datetime(options.bad):
                raise MozRegressionError(
                    ("Good date %s is later than bad date %s."
                     " Maybe you wanted to use the --find-fix"
                     " flag?") % (options.good, options.bad))
            elif options.find_fix and \
                    to_datetime(options.good) < to_datetime(options.bad):
                raise MozRegressionError(
                    ("Bad date %s is later than good date %s."
                     " You should not use the --find-fix flag"
                     " in this case...") % (options.bad, options.good))
            if not fetch_config.should_use_taskcluster():
                self.action = "bisect_nightlies"

    if self.action in ('launch_inbound', 'bisect_inbounds') \
            and not fetch_config.is_inbound():
        raise MozRegressionError('Unable to bisect inbound for `%s`'
                                 % fetch_config.app_name)

    options.preferences = preferences(options.prefs_files, options.prefs,
                                      self.logger)
    # convert GiB to bytes.
    options.persist_size_limit = \
        int(abs(float(options.persist_size_limit)) * 1073741824)

def validate(self):
    """
    Validate the options, define the `action` and `fetch_config` that
    should be used to run the application.
    """
    options = self.options

    user_defined_bits = options.bits is not None
    options.bits = parse_bits(options.bits or mozinfo.bits)
    fetch_config = create_config(options.app, mozinfo.os, options.bits)
    try:
        fetch_config.set_build_type(options.build_type)
    except MozRegressionError as msg:
        self.logger.warning(
            "%s (Defaulting to %r)" % (msg, fetch_config.build_type)
        )
    self.fetch_config = fetch_config

    fetch_config.set_repo(options.repo)
    if fetch_config.is_nightly():
        fetch_config.set_base_url(options.archive_base_url)

    if not user_defined_bits and \
            options.bits == 64 and \
            mozinfo.os == 'win' and \
            32 in fetch_config.available_bits():
        # inform users on windows that we are using 64 bit builds.
        self.logger.info("bits option not specified, using 64-bit builds.")

    if options.bits == 32 and mozinfo.os == 'mac':
        self.logger.info("only 64-bit builds available for mac, using "
                         "64-bit builds")

    if fetch_config.is_inbound() and fetch_config.tk_needs_auth():
        creds = tc_authenticate(self.logger)
        fetch_config.set_tk_credentials(creds)

    # set the action: launch a single build or bisect, using a changeset
    # or a date
    if options.launch:
        options.launch = self._convert_to_bisect_arg(options.launch)
        self.action = "launch_inbound"
        if is_date_or_datetime(options.launch) and \
                not fetch_config.should_use_taskcluster():
            self.action = "launch_nightlies"
    else:
        # define good/bad default values if required
        default_good_date, default_bad_date = \
            get_default_date_range(fetch_config)
        if options.find_fix:
            default_bad_date, default_good_date = \
                default_good_date, default_bad_date
        if not options.bad:
            options.bad = default_bad_date
            self.logger.info("No 'bad' option specified, using %s"
                             % options.bad)
        else:
            options.bad = self._convert_to_bisect_arg(options.bad)
        if not options.good:
            options.good = default_good_date
            self.logger.info("No 'good' option specified, using %s"
                             % options.good)
        else:
            options.good = self._convert_to_bisect_arg(options.good)

        self.action = "bisect_inbounds"
        if is_date_or_datetime(options.good) and \
                is_date_or_datetime(options.bad):
            if not options.find_fix and \
                    to_datetime(options.good) > to_datetime(options.bad):
                raise MozRegressionError(
                    ("Good date %s is later than bad date %s."
                     " Maybe you wanted to use the --find-fix"
                     " flag?") % (options.good, options.bad))
            elif options.find_fix and \
                    to_datetime(options.good) < to_datetime(options.bad):
                raise MozRegressionError(
                    ("Bad date %s is later than good date %s."
                     " You should not use the --find-fix flag"
                     " in this case...") % (options.bad, options.good))
            if not fetch_config.should_use_taskcluster():
                self.action = "bisect_nightlies"

    if self.action in ('launch_inbound', 'bisect_inbounds') \
            and not fetch_config.is_inbound():
        raise MozRegressionError('Unable to bisect inbound for `%s`'
                                 % fetch_config.app_name)

    options.preferences = preferences(options.prefs_files, options.prefs)
    # convert GiB to bytes.
    options.persist_size_limit = \
        int(abs(float(options.persist_size_limit)) * 1073741824)

def validate(self):
    """
    Validate the options, define the `action` and `fetch_config` that
    should be used to run the application.
    """
    options = self.options

    user_defined_bits = options.bits is not None
    options.bits = parse_bits(options.bits or mozinfo.bits)

    if options.arch is not None:
        if options.app != "gve":
            self.logger.warning("--arch ignored for non-GVE app.")
            options.arch = None

    fetch_config = create_config(options.app, mozinfo.os, options.bits,
                                 mozinfo.processor, options.arch)
    if options.lang:
        if options.app != "firefox-l10n":
            raise MozRegressionError(
                "--lang is only valid with --app=firefox-l10n")
        fetch_config.set_lang(options.lang)
    elif options.app == "firefox-l10n":
        raise MozRegressionError(
            "app 'firefox-l10n' requires a --lang argument")
    if options.build_type:
        try:
            fetch_config.set_build_type(options.build_type)
        except MozRegressionError as msg:
            self.logger.warning("%s (Defaulting to %r)"
                                % (msg, fetch_config.build_type))
    self.fetch_config = fetch_config

    fetch_config.set_repo(options.repo)
    fetch_config.set_base_url(options.archive_base_url)

    if (not user_defined_bits and options.bits == 64
            and mozinfo.os == "win"
            and 32 in fetch_config.available_bits()):
        # inform users on windows that we are using 64 bit builds.
        self.logger.info("bits option not specified, using 64-bit builds.")

    if options.bits == 32 and mozinfo.os == "mac":
        self.logger.info("only 64-bit builds available for mac, using "
                         "64-bit builds")

    if fetch_config.is_integration() and fetch_config.tk_needs_auth():
        creds = tc_authenticate(self.logger)
        fetch_config.set_tk_credentials(creds)

    # set the action: launch a single build or bisect, using a changeset
    # or a date
    if options.launch:
        options.launch = self._convert_to_bisect_arg(options.launch)
        self.action = "launch_integration"
        if is_date_or_datetime(options.launch) and \
                fetch_config.should_use_archive():
            self.action = "launch_nightlies"
    else:
        # define good/bad default values if required
        default_good_date, default_bad_date = get_default_date_range(
            fetch_config)
        if options.find_fix:
            default_bad_date, default_good_date = (
                default_good_date,
                default_bad_date,
            )
        if not options.bad:
            options.bad = default_bad_date
            self.logger.info("No 'bad' option specified, using %s"
                             % options.bad)
        else:
            options.bad = self._convert_to_bisect_arg(options.bad)
        if not options.good:
            options.good = default_good_date
            self.logger.info("No 'good' option specified, using %s"
                             % options.good)
        else:
            options.good = self._convert_to_bisect_arg(options.good)

        self.action = "bisect_integration"
        if is_date_or_datetime(options.good) and \
                is_date_or_datetime(options.bad):
            if not options.find_fix and \
                    to_datetime(options.good) > to_datetime(options.bad):
                raise MozRegressionError(
                    ("Good date %s is later than bad date %s."
                     " Maybe you wanted to use the --find-fix"
                     " flag?") % (options.good, options.bad))
            elif options.find_fix and \
                    to_datetime(options.good) < to_datetime(options.bad):
                raise MozRegressionError(
                    ("Bad date %s is later than good date %s."
                     " You should not use the --find-fix flag"
                     " in this case...") % (options.bad, options.good))
            if fetch_config.should_use_archive():
                self.action = "bisect_nightlies"

    if (self.action in ("launch_integration", "bisect_integration")
            and not fetch_config.is_integration()):
        raise MozRegressionError("Unable to bisect integration for `%s`"
                                 % fetch_config.app_name)

    options.preferences = preferences(options.prefs_files, options.prefs,
                                      self.logger)
    # convert GiB to bytes.
    options.persist_size_limit = int(
        abs(float(options.persist_size_limit)) * 1073741824)