def setup(self):
    """Make sure mozproxy is importable, installing it from mozbase if needed.

    Also exports MOZ_HOST_BIN so mozproxy can locate host binaries.
    """
    try:
        import mozproxy  # noqa: F401
    except ImportError:
        # Install mozproxy and its vendored deps.
        mozbase = pathlib.Path(self.mach_cmd.topsrcdir, "testing", "mozbase")
        mozproxy_deps = ["mozinfo", "mozlog", "mozproxy"]
        for i in mozproxy_deps:
            install_package(self.mach_cmd.virtualenv_manager, pathlib.Path(mozbase, i))
    # set MOZ_HOST_BIN to find certutil. Required to set certificates on android
    os.environ["MOZ_HOST_BIN"] = self.mach_cmd.bindir
def _setup(self):
    """Create the InfluxDB client and the Grafana helper.

    The ``influxdb`` and ``grafana_api`` packages are pip-installed on
    demand. Connection settings come from the Taskcluster secret when
    running on try, otherwise from the ``--perfboard-*`` command-line
    arguments.
    """
    venv = self.mach_cmd.virtualenv_manager

    # Import the two clients, installing their packages on first use.
    try:
        from influxdb import InfluxDBClient
    except ImportError:
        install_package(venv, "influxdb", ignore_failure=False)
        from influxdb import InfluxDBClient
    try:
        from mozperftest.metrics.perfboard.grafana import Grafana
    except ImportError:
        install_package(venv, "grafana_api", ignore_failure=False)
        from mozperftest.metrics.perfboard.grafana import Grafana

    if utils.ON_TRY:
        secret = get_tc_secret()
        i_host = secret["influx_host"]
        i_port = secret["influx_port"]
        i_user = secret["influx_user"]
        i_password = secret["influx_password"]
        i_dbname = secret["influx_db"]
        g_key = secret["grafana_key"]
        g_host = secret["grafana_host"]
        g_port = secret["grafana_port"]
    else:

        def _required(name):
            # fetch a mandatory command-line argument or bail out
            value = self.get_arg(name)
            if value is None:
                raise Exception("You need to set --perfboard-%s" % name)
            return value

        i_host = self.get_arg("influx-host")
        i_port = self.get_arg("influx-port")
        i_user = self.get_arg("influx-user")
        i_password = _required("influx-password")
        i_dbname = self.get_arg("influx-db")
        g_key = _required("grafana-key")
        g_host = self.get_arg("grafana-host")
        g_port = self.get_arg("grafana-port")

    self.client = InfluxDBClient(i_host, i_port, i_user, i_password, i_dbname)
    # this will error out if the server is unreachable
    self.client.ping()
    self.grafana = Grafana(self, g_key, g_host, g_port)
def run_tests(mach_cmd, **kwargs):
    """Run perftests; callable directly via main or through Mach.

    When the --on-try option is used, the test runner looks for the
    `parameters.yml` artifact that contains all options passed by the
    user via a ./mach perftest --push-to-try call.
    """
    _setup_path()

    if kwargs.pop("on_try", False):
        # pull the real arguments out of the task parameters artifact
        kwargs.update(_get_params()["try_options"]["perftest"])

    from mozperftest.utils import build_test_list, install_package
    from mozperftest import MachEnvironment, Metadata

    flavor = kwargs["flavor"]
    kwargs["tests"] = build_test_list(kwargs["tests"], randomized=flavor != "doc")

    if flavor == "doc":
        # the doc flavor just prints each script's metadata (needs esprima)
        location = os.path.join(mach_cmd.topsrcdir, "third_party", "python", "esprima")
        install_package(mach_cmd.virtualenv_manager, location)

        from mozperftest.scriptinfo import ScriptInfo

        for test in kwargs["tests"]:
            print(ScriptInfo(test))
        return

    env = MachEnvironment(mach_cmd, **kwargs)
    metadata = Metadata(mach_cmd, env, flavor)
    env.run_hook("before_runs")
    try:
        with env.frozen() as frozen_env:
            frozen_env.run(metadata)
    finally:
        env.run_hook("after_runs")
def before_iterations(kw):
    """Collect the Fenix nightly-simulation builds to test and set up
    ``build_generator`` to yield one (revision, timestamp, urls) tuple
    per build; also sets ``kw["test_iterations"]`` to the build count.
    """
    global build_generator

    install_list = kw.get("android_install_apk")
    # only applies to the multi-commit nightly simulation APK flavor
    if len(install_list) == 0 or all(
        ["fenix_nightlysim_multicommit" not in apk for apk in install_list]
    ):
        return

    # Install gitpython
    install_package(kw["virtualenv"], "gitpython==3.1.0")
    import git

    class _GitProgress(git.RemoteProgress):
        # forward git's progress messages to stdout
        def update(self, op_code, cur_count, max_count=None, message=""):
            if message:
                print(message)

    # Setup the local fenix github repo
    print("Cloning fenix repo...")
    fenix_repo = git.Repo.clone_from(
        "https://github.com/mozilla-mobile/fenix",
        tempfile.mkdtemp(),
        branch="master",
        progress=_GitProgress(),
    )

    # Get the builds to test
    # NOTE(review): this is an exact-membership test against the option list,
    # unlike the substring check above — confirm "arm64_v8a" can appear as a
    # standalone entry in android_install_apk.
    architecture = (
        "arm64-v8a" if "arm64_v8a" in kw.get("android_install_apk") else "armeabi-v7a"
    )
    json_ = _fetch_json(get_revision_namespace_url, NIGHTLY_SIM_ROUTE, day=kw["test_date"])
    namespaces = json_["namespaces"]
    revisions = [namespace["name"] for namespace in namespaces]

    tasks = []
    for revision in revisions:
        try:
            # committed_date gives us the build's timestamp
            commitdate = fenix_repo.commit(revision).committed_date
        except ValueError:
            # revision isn't reachable from the cloned master branch
            print("Commit %s is not from the Fenix master branch" % revision)
            continue

        json_ = _fetch_json(get_multi_tasks_url, NIGHTLY_SIM_ROUTE, revision, day=kw["test_date"])
        for task in json_["tasks"]:
            route = task["namespace"]
            # the last route segment encodes the APK architecture
            task_architecture = route.split(".")[-1]
            if task_architecture == architecture:
                tasks.append(
                    {
                        "timestamp": commitdate,
                        "revision": revision,
                        "route": route,
                        "route_suffix": ROUTE_SUFFIX.format(architecture=task_architecture),
                    }
                )

    # Set the number of test-iterations to the number of builds
    kw["test_iterations"] = len(tasks)

    def _build_iterator():
        # lazily yield one download target per collected build
        for task in tasks:
            revision = task["revision"]
            timestamp = task["timestamp"]
            humandate = time.ctime(int(timestamp))
            print(f"Testing revision {revision} from {humandate}")
            download_url = f'{_ROOT_URL}{task["route"]}/{task["route_suffix"]}'
            yield revision, timestamp, [download_url]

    build_generator = _build_iterator()
    return kw
def _run_tests(self, **kwargs):
    """Lint the mozperftest sources and run its unit tests under coverage.

    Keyword Args:
        skip_linters (bool): skip ./mach lint and the final coverage check.
        verbose (bool): verbose output for linters and pytest.
        tests (list): explicit test files to run; defaults to everything
            under HERE/tests.

    Raises:
        AssertionError: when the linters or the test run fail.
        ValueError: when the coverage report is below the threshold.
    """
    from pathlib import Path
    from mozperftest.runner import _setup_path
    from mozperftest.utils import install_package, ON_TRY

    skip_linters = kwargs.get("skip_linters", False)
    verbose = kwargs.get("verbose", False)

    # include in sys.path all deps
    _setup_path()

    try:
        import coverage  # noqa
    except ImportError:
        pydeps = Path(self.topsrcdir, "third_party", "python")
        vendors = ["coverage"]
        if not ON_TRY:
            vendors.append("attrs")
        # pip-installing dependencies that require compilation or special setup
        for dep in vendors:
            install_package(self.virtualenv_manager, str(Path(pydeps, dep)))

    if not ON_TRY and not skip_linters:
        cmd = "./mach lint "
        if verbose:
            cmd += " -v"
        cmd += " " + str(HERE)
        if not self._run_script(cmd, label="linters", display=verbose, verbose=verbose):
            raise AssertionError("Please fix your code.")

    # running pytest with coverage
    # coverage is done in three steps:
    # 1/ coverage erase => erase any previous coverage data
    # 2/ coverage run pytest ... => run the tests and collect info
    # 3/ coverage report => generate the report
    tests_dir = Path(HERE, "tests").resolve()
    tests = kwargs.get("tests", [])
    if tests == []:
        # keep this a one-element list: a bare path string would be iterated
        # character-by-character by the join below
        tests = [str(tests_dir)]
        run_coverage_check = not skip_linters
    else:
        run_coverage_check = False

    def _get_test(test):
        # resolve bare test names against the tests directory
        if Path(test).exists():
            return str(test)
        return str(tests_dir / test)

    tests = " ".join([_get_test(test) for test in tests])

    import pytest

    options = "-xs"
    if kwargs.get("verbose"):
        options += "v"

    if run_coverage_check:
        assert self._run_python_script("coverage", "erase", label="remove old coverage data")

    # use the full option name: "--duration" only works through argparse
    # prefix abbreviation and breaks if another option shares the prefix
    args = [
        "run",
        pytest.__file__,
        options,
        "--durations",
        "10",
        tests,
    ]
    assert self._run_python_script(
        "coverage", *args, label="running tests", verbose=verbose, display=verbose
    )
    if run_coverage_check and not self._run_python_script("coverage", "report", display=True):
        raise ValueError("Coverage is too low!")
def test_install_package():
    """install_package() should delegate the install to the venv's pip runner."""
    fake_venv = mock.Mock(bin_path="someplace")
    install_package(fake_venv, "foo")
    fake_venv._run_pip.assert_called()
def run_tests(self, **kwargs):
    """Format, lint and run the perftest unit tests under coverage.

    Runs black (off-try), flake8 (unless on macOS try workers), then the
    test suite via ``coverage run pytest``.

    Keyword Args:
        skip_linters (bool): skip black/flake8 and the coverage check.
        tests (list): explicit test files; defaults to everything under
            the local ``tests`` directory.

    Raises:
        AssertionError: when a linter or the test run fails.
        ValueError: when the coverage report is below the threshold.
    """
    MachCommandBase._activate_virtualenv(self)

    from pathlib import Path
    from mozperftest.runner import _setup_path
    from mozperftest.utils import install_package, temporary_env

    skip_linters = kwargs.get("skip_linters", False)

    # include in sys.path all deps
    _setup_path()

    try:
        import coverage  # noqa
    except ImportError:
        pydeps = Path(self.topsrcdir, "third_party", "python")
        vendors = ["coverage"]
        if skip_linters:
            pypis = []
        else:
            pypis = ["flake8"]

        # if we're not on try we want to install black
        if not ON_TRY and not skip_linters:
            pypis.append("black")

        # these are the deps we are getting from pypi
        for dep in pypis:
            install_package(self.virtualenv_manager, dep)

        # pip-installing dependencies that require compilation or special setup
        for dep in vendors:
            install_package(self.virtualenv_manager, str(Path(pydeps, dep)))

    here = Path(__file__).parent.resolve()
    if not ON_TRY and not skip_linters:
        # formatting the code with black
        assert self._run_python_script("black", str(here))

    # checking flake8 correctness
    if not (ON_TRY and sys.platform == "darwin") and not skip_linters:
        assert self._run_python_script("flake8", str(here))

    # running pytest with coverage
    # coverage is done in three steps:
    # 1/ coverage erase => erase any previous coverage data
    # 2/ coverage run pytest ... => run the tests and collect info
    # 3/ coverage report => generate the report
    tests_dir = Path(here, "tests").resolve()
    tests = kwargs.get("tests", [])
    if tests == []:
        # keep this a one-element list: a bare path string would be iterated
        # character-by-character by the join below
        tests = [str(tests_dir)]
        run_coverage_check = not skip_linters
    else:
        run_coverage_check = False

    def _get_test(test):
        # resolve bare test names against the tests directory
        if Path(test).exists():
            return str(test)
        return str(tests_dir / test)

    tests = " ".join([_get_test(test) for test in tests])

    import pytest

    with temporary_env(COVERAGE_RCFILE=str(here / ".coveragerc")):
        if run_coverage_check:
            assert self._run_python_script(
                "coverage", "erase", label="remove old coverage data"
            )

        args = [
            "run",
            pytest.__file__,
            "-xs",
            tests,
        ]
        assert self._run_python_script("coverage", *args, label="running tests")

        if run_coverage_check and not self._run_python_script(
            "coverage", "report", display=True
        ):
            raise ValueError("Coverage is too low!")
def setup(self):
    """Install browsertime and visualmetrics.py prerequisites and the
    Node.js package.

    Raises:
        NotImplementedError: unless exactly one test was passed.
        ValueError: when --install-url is not a 40-hex tarball URL.
    """
    node = self.get_arg("node")
    if node is not None:
        # let the node toolchain resolution pick up the explicit binary
        os.environ["NODEJS"] = node
    super(BrowsertimeRunner, self).setup()
    install_url = self.get_arg("install-url")
    tests = self.get_arg("tests", [])
    if len(tests) != 1:
        # we don't support auto-discovery (no test passed) or multiple
        # tests here yet.
        raise NotImplementedError()
    self._test_script = ScriptInfo(tests[0])

    # installing Python deps on the fly
    for dep in ("Pillow==%s" % PILLOW_VERSION, "pyssim==%s" % PYSSIM_VERSION):
        install_package(self.virtualenv_manager, dep, ignore_failure=True)

    # check if the browsertime package has been deployed correctly
    # for this we just check for the browsertime directory presence
    if (
        self.visualmetrics_py.exists()
        and self.browsertime_js.exists()
        and not self.get_arg("clobber")
    ):
        return

    # preparing ~/.mozbuild/browsertime
    for file in ("package.json", "package-lock.json"):
        src = BROWSERTIME_SRC_ROOT / file
        target = self.state_path / file
        # Overwrite the existing files
        shutil.copyfile(str(src), str(target))

    package_json_path = self.state_path / "package.json"

    if install_url is not None:
        self.info(
            "Updating browsertime node module version in {package_json_path} "
            "to {install_url}",
            install_url=install_url,
            package_json_path=str(package_json_path),
        )

        # only accept github tarball URLs pinned to a full 40-char sha
        expr = r"/tarball/[a-f0-9]{40}$"
        if not re.search(expr, install_url):
            raise ValueError(
                "New upstream URL does not end with {}: '{}'".format(expr[:-1], install_url)
            )

        # OrderedDict keeps the original key order when rewriting the file
        with package_json_path.open() as f:
            existing_body = json.loads(f.read(), object_pairs_hook=collections.OrderedDict)

        existing_body["devDependencies"]["browsertime"] = install_url
        updated_body = json.dumps(existing_body)
        with package_json_path.open("w") as f:
            f.write(updated_body)

    self._setup_node_packages(package_json_path)
def setup(self):
    """Install mozproxy together with its vendored mozbase dependencies."""
    mozbase_dir = os.path.join(self.mach_cmd.topsrcdir, "testing", "mozbase")
    for package_name in ("mozinfo", "mozlog", "mozproxy"):
        install_package(
            self.mach_cmd.virtualenv_manager, os.path.join(mozbase_dir, package_name)
        )
def setup(self):
    """Install the pinned edgeping and requests packages."""
    for requirement in ("edgeping==0.1", "requests==2.9.1"):
        install_package(self.mach_cmd.virtualenv_manager, requirement)
def setup(self):
    """Install browsertime and visualmetrics.py prerequisites and the
    Node.js package.

    Raises:
        ValueError: when --install-url is not a 40-hex tarball URL.
    """
    node = self.get_arg("node")
    if node is not None:
        # let the node toolchain resolution pick up the explicit binary
        os.environ["NODEJS"] = node
    super(BrowsertimeRunner, self).setup()
    install_url = self.get_arg("install-url")

    # installing Python deps on the fly, only when visual metrics are needed
    visualmetrics = self.get_arg("visualmetrics", False)
    if visualmetrics:
        for dep in get_dependencies():
            install_package(self.virtualenv_manager, dep, ignore_failure=True)

    # check if the browsertime package has been deployed correctly
    # for this we just check for the browsertime directory presence
    # we also make sure the visual metrics module is there *if*
    # we need it
    if not self._should_install() and not self.get_arg("clobber"):
        return

    # preparing ~/.mozbuild/browsertime
    for file in ("package.json", "package-lock.json"):
        src = BROWSERTIME_SRC_ROOT / file
        target = self.state_path / file
        # Overwrite the existing files
        shutil.copyfile(str(src), str(target))

    package_json_path = self.state_path / "package.json"

    if install_url is not None:
        self.info(
            "Updating browsertime node module version in {package_json_path} "
            "to {install_url}",
            install_url=install_url,
            package_json_path=str(package_json_path),
        )

        # only accept github tarball URLs pinned to a full 40-char sha
        expr = r"/tarball/[a-f0-9]{40}$"
        if not re.search(expr, install_url):
            raise ValueError(
                "New upstream URL does not end with {}: '{}'".format(
                    expr[:-1], install_url
                )
            )

        # OrderedDict keeps the original key order when rewriting the file
        with package_json_path.open() as f:
            existing_body = json.loads(
                f.read(), object_pairs_hook=collections.OrderedDict
            )

        existing_body["devDependencies"]["browsertime"] = install_url
        updated_body = json.dumps(existing_body)
        with package_json_path.open("w") as f:
            f.write(updated_body)

    self._setup_node_packages(package_json_path)