def test_allow_import_of_files_found_in_modules_during_parallel_check(
    self, tmpdir: LocalPath
) -> None:
    """Files inside a checked package must be found under any job count.

    Regression test: with multiple jobs (`-j2`) pylint could not find the
    `spam.py` file inside the package being checked.
    """
    pkg_dir = tmpdir / "test_directory"
    pkg_dir.mkdir()
    (pkg_dir / "spam.py").write("'Empty'")
    (pkg_dir / "__init__.py").write("'Empty'")

    # Exercise the historically broken multi-job path first, then the
    # single-job path, which should be fine as well.
    for jobs_flag in ("-j2", "-j1"):
        with tmpdir.as_cwd():
            self._runtest(
                [
                    jobs_flag,
                    "--disable=missing-docstring, missing-final-newline",
                    "test_directory",
                ],
                code=0,
            )
def test_pylint_run_jobs_equal_zero_dont_crash_with_cpu_fraction(
    tmpdir: LocalPath,
) -> None:
    """Check that the pylint runner does not crash if `pylint.lint.run._query_cpu`
    determines only a fraction of a CPU core to be available.
    """
    real_open = open
    # cgroup pseudo-files the mocked `open` should serve, with their contents:
    # quota of -1 means "unlimited", shares of 2 is a tiny CPU fraction.
    cgroup_contents = {
        "/sys/fs/cgroup/cpu/cpu.cfs_quota_us": b"-1",
        "/sys/fs/cgroup/cpu/cpu.shares": b"2",
    }

    def _fake_open(*args, **kwargs):
        data = cgroup_contents.get(args[0])
        if data is None:
            return real_open(*args, **kwargs)
        return mock_open(read_data=data)(*args, **kwargs)

    real_path = pathlib.Path

    def _fake_path(*args, **kwargs):
        # Pretend the cpu.shares pseudo-file exists so _query_cpu reads it.
        if args[0] == "/sys/fs/cgroup/cpu/cpu.shares":
            return MagicMock(is_file=lambda: True)
        return real_path(*args, **kwargs)

    filepath = os.path.abspath(__file__)
    testargs = [filepath, "--jobs=0"]
    with tmpdir.as_cwd():
        with pytest.raises(SystemExit) as err:
            with patch("builtins.open", _fake_open):
                with patch("pylint.lint.run.Path", _fake_path):
                    Run(testargs, reporter=Reporter())
        assert err.value.code == 0
def test_do_not_import_files_from_local_directory_with_pythonpath(
    tmpdir: LocalPath,
) -> None:
    """Linting a local file named after an importable module must not import it,
    even when PYTHONPATH carries a trailing colon.

    Appending a colon to PYTHONPATH should not break path stripping.
    Regression test for https://github.com/PyCQA/pylint/issues/3636
    """
    p_astroid = tmpdir / "astroid.py"
    p_astroid.write("'Docstring'\nimport completely_unknown\n")
    p_hmac = tmpdir / "hmac.py"
    p_hmac.write("'Docstring'\nimport completely_unknown\n")

    with tmpdir.as_cwd():
        orig_pythonpath = os.environ.get("PYTHONPATH")
        os.environ["PYTHONPATH"] = f"{(orig_pythonpath or '').strip(':')}:"
        # FIX: restore PYTHONPATH in a finally block -- previously a failing
        # subprocess would leave the mutated PYTHONPATH in os.environ and
        # poison every subsequent test in the session.
        try:
            subprocess.check_output(
                [
                    sys.executable,
                    "-m",
                    "pylint",
                    "astroid.py",
                    "--disable=import-error,unused-import",
                ],
                cwd=str(tmpdir),
            )
        finally:
            if orig_pythonpath:
                os.environ["PYTHONPATH"] = orig_pythonpath
            else:
                del os.environ["PYTHONPATH"]
def test_import_plugin_from_local_directory_if_pythonpath_cwd(
    tmpdir: LocalPath,
) -> None:
    """A plugin in the current directory is importable when PYTHONPATH
    contains the cwd; pylint then fails because the module has no
    ``register`` attribute -- which proves the plugin module was imported.
    """
    p_plugin = tmpdir / "plugin.py"
    p_plugin.write("# Some plugin content")

    with tmpdir.as_cwd():
        orig_pythonpath = os.environ.get("PYTHONPATH")
        if sys.platform == "win32":
            os.environ["PYTHONPATH"] = "."
        else:
            os.environ["PYTHONPATH"] = f"{(orig_pythonpath or '').strip(':')}:."
        # FIX: restore PYTHONPATH in a finally block -- previously a failure
        # in subprocess.run or the assertion left the mutated PYTHONPATH in
        # os.environ, leaking into every subsequent test.
        try:
            process = subprocess.run(
                [
                    sys.executable,
                    "-m",
                    "pylint",
                    "--load-plugins",
                    "plugin",
                ],
                cwd=str(tmpdir),
                stderr=subprocess.PIPE,
                check=False,
            )
            # The AttributeError shows the local plugin module WAS imported.
            assert (
                "AttributeError: module 'plugin' has no attribute 'register'"
                in process.stderr.decode()
            )
        finally:
            if orig_pythonpath:
                os.environ["PYTHONPATH"] = orig_pythonpath
            else:
                del os.environ["PYTHONPATH"]
def test_do_not_import_files_from_local_directory(tmpdir: LocalPath) -> None:
    """Linting local files named after importable modules must not import them."""
    for module_name in ("astroid.py", "hmac.py"):
        (tmpdir / module_name).write("'Docstring'\nimport completely_unknown\n")

    pylint_cmd = [sys.executable, "-m", "pylint"]
    disable_arg = "--disable=import-error,unused-import"

    # Linting this astroid file does not import it (single job).
    with tmpdir.as_cwd():
        subprocess.check_output(
            pylint_cmd + ["astroid.py", disable_arg],
            cwd=str(tmpdir),
        )

    # Same check with multiple jobs.
    with tmpdir.as_cwd():
        subprocess.check_output(
            pylint_cmd + ["-j2", "astroid.py", disable_arg],
            cwd=str(tmpdir),
        )

    # Test with multiple jobs for hmac.py for which we have a
    # CVE against: https://github.com/PyCQA/pylint/issues/959
    with tmpdir.as_cwd():
        subprocess.call(
            pylint_cmd + ["-j2", "hmac.py", disable_arg],
            cwd=str(tmpdir),
        )
def test_runner_with_arguments(runner: Callable, tmpdir: LocalPath) -> None:
    """Check the runners with arguments as parameter instead of sys.argv."""
    argv = [os.path.abspath(__file__)]
    with tmpdir.as_cwd():
        with pytest.raises(SystemExit) as excinfo:
            runner(argv)
        # A clean lint run exits with code 0.
        assert excinfo.value.code == 0
def test_runner(runner: Callable, tmpdir: LocalPath) -> None:
    """Check the runners when arguments come from a patched sys.argv."""
    fake_argv = ["", os.path.abspath(__file__)]
    with tmpdir.as_cwd():
        with patch.object(sys, "argv", fake_argv):
            with pytest.raises(SystemExit) as excinfo:
                runner()
            # A clean lint run exits with code 0.
            assert excinfo.value.code == 0
def test_pyproject_common_statements(runner: CliRunner, tmpdir: LocalPath) -> None:
    """Allow common_statements to be defined in pyproject.toml"""
    (tmpdir / "pyproject.toml").write(PYPROJECT_CONFIG)
    source_file = tmpdir / "source.py"
    source_file.write(PYPROJECT_CONFIG_TEST_SOURCE)

    # Run from the project directory so pyproject.toml is picked up.
    with tmpdir.as_cwd():
        outcome = runner.invoke(cli, [str(source_file)])
        assert outcome.exit_code == 0
        assert source_file.read() == PYPROJECT_CONFIG_FIXED_SOURCE
def test_global_and_local_config(
    runner: CliRunner,
    tmpdir: LocalPath,
    create_global_conf: bool,
    use_local_conf: bool,
    create_pyproject: bool,
    expected_imports: str,
) -> None:
    """
    Test interaction between the following:

    - presence of the global config file $XDG_CONFIG_HOME/autoimport/config.toml
    - use of the --config-file flag to specify a local config file
    - presence of a pyproject.toml file
    """
    global_statements = '[common_statements]\n"G" = "from g import G"'
    local_statements = '[common_statements]\n"L" = "from l import L"'
    pyproject_statements = '[tool.autoimport.common_statements]\n"P" = "from p import P"'

    source_path = tmpdir / "code.py"
    source_before = dedent(
        """
        G
        L
        P
        """
    )
    source_path.write(source_before)

    cli_args: List[str] = [str(source_path)]
    cli_env: Dict[str, Optional[str]] = {}

    if create_global_conf:
        xdg_home = tmpdir / "xdg_home"
        # XDG_CONFIG_HOME must be an absolute path.
        cli_env["XDG_CONFIG_HOME"] = str(Path(xdg_home).resolve())
        global_conf_file = xdg_home / "autoimport" / "config.toml"
        global_conf_file.ensure()
        global_conf_file.write(global_statements)

    if use_local_conf:
        local_conf_file = tmpdir / "cfg" / "local.toml"
        local_conf_file.ensure()
        local_conf_file.write(local_statements)
        cli_args.extend(["--config-file", str(local_conf_file)])

    if create_pyproject:
        (tmpdir / "pyproject.toml").write(pyproject_statements)

    with tmpdir.as_cwd():
        outcome = runner.invoke(cli, cli_args, env=cli_env)
        assert outcome.exit_code == 0
        assert source_path.read() == expected_imports + "\n" + source_before
def test_cpu_project(tmpdir, datadir):
    """
    Test end to end functionality of creating, deploying and using a denzel project (CPU)

    :param tmpdir: tmpdir
    :type tmpdir: py._path.local.LocalPath
    """
    # NOTE(review): a sibling test in this file shares this function name;
    # if both live in one module, the later definition shadows this one —
    # confirm they come from separate test files.
    runner = CliRunner()

    with tmpdir.as_cwd():
        # -------- CPU version --------
        result = runner.invoke(cli.startproject, args=['test_project'])

        # Verify command executed
        assert result.exit_code == 0
        assert 'Successfully built' in result.output

        # Verify failing commands outside project dir
        assert all(runner.invoke(cmd).exit_code != 0 for cmd in config.PROJECT_COMMANDS)

    project_dir = LocalPath(str(tmpdir) + '/test_project')

    with project_dir.as_cwd():
        # Launch project on occupied ports — must fail with an error message.
        with occupy_port(cli_config.API_PORT):
            result = runner.invoke(cli.launch)
            assert result.exit_code != 0
            assert 'Error:' in result.output

    # Copy source files to test project directory
    shutil.copy(src='{}/info.txt'.format(datadir), dst=project_dir + '/app/assets/')
    shutil.copy(src='{}/requirements.txt'.format(datadir), dst=project_dir + '/requirements.txt')
    shutil.copy(src='{}/iris_svc.pkl'.format(datadir), dst=project_dir + '/app/assets/')
    shutil.copy(src='{}/pipeline.py'.format(datadir), dst=project_dir + '/app/logic/')

    try:
        with project_dir.as_cwd():
            # Launch project successfully
            result = runner.invoke(cli.launch)
            assert result.exit_code == 0

            # Wait till all are up — poll `status` until the output reports
            # at least 5 'UP' entries (presumably one per service — confirm).
            start_time = time.time()
            while True:
                result = runner.invoke(cli.status)
                assert result.exit_code == 0
                if str(result.output).count('UP') < 5:
                    time.sleep(2)
                else:
                    break  # All is up
                if time.time() - start_time > 240:
                    raise TimeoutError('Too long installation phase')

            # Check info endpoint
            response = requests.get('http://localhost:8000/info')
            assert response.status_code == 200
            assert 'For prediction' in response.text

            # Check prediction endpoint - synchronous
            assert_sync()

            # Check prediction endpoint - asynchronous
            result = runner.invoke(cli.response, args=['--async'])
            assert result.exit_code == 0
            assert_async()

            # Revert to synchronous again and check endpoint again
            result = runner.invoke(cli.response, args=['--sync', '--timeout', '5'])
            assert result.exit_code == 0
            assert_sync()

            # -------- Check updateenvreqs --------
            containers_names = utils.get_containers_names()
            client = docker.from_env()
            denzel_container = client.containers.get(containers_names['denzel'])

            # Assert htop non-existent before the OS requirements update.
            status_code, output = denzel_container.exec_run('htop')
            assert b'OCI runtime exec failed' in output

            # updateenvreqs: install OS-level requirements from requirements.sh
            # (NOTE(review): the command invoked is `updateosreqs` — confirm
            # the comment/command naming mismatch is intentional).
            shutil.copy(src='{}/requirements.sh'.format(datadir), dst=project_dir + '/requirements.sh')
            result = runner.invoke(cli.updateosreqs)
            assert result.exit_code == 0

            # Wait till all are up again after the requirements update.
            start_time = time.time()
            while True:
                result = runner.invoke(cli.status)
                assert result.exit_code == 0
                if str(result.output).count('UP') < 5:
                    time.sleep(2)
                else:
                    break  # All is up
                if time.time() - start_time > 180:
                    raise TimeoutError('Too long installation phase')

            # Assert htop was installed by requirements.sh
            status_code, output = denzel_container.exec_run('htop --version')
            assert status_code == 0
            assert b'Released under the GNU GPL' in output
    finally:
        # Always shut the project down so containers don't outlive the test.
        with project_dir.as_cwd():
            result = runner.invoke(cli.shutdown)
            assert result.exit_code == 0
def test_cpu_project(tmpdir, datadir):
    """
    Test end to end functionality of creating, deploying and using a denzel project (CPU)

    :param tmpdir: tmpdir
    :type tmpdir: py._path.local.LocalPath
    """
    # NOTE(review): a sibling test in this file shares this function name;
    # if both live in one module, the later definition shadows the earlier —
    # confirm they come from separate test files.
    runner = CliRunner()

    with tmpdir.as_cwd():
        # -------- CPU version --------
        result = runner.invoke(cli.startproject, args=['test_project'])

        # Verify command executed
        assert result.exit_code == 0
        assert 'Successfully built' in result.output

        # Verify failing commands outside project dir
        assert all(runner.invoke(cmd).exit_code != 0 for cmd in config.PROJECT_COMMANDS)

    project_dir = LocalPath(str(tmpdir) + '/test_project')

    with project_dir.as_cwd():
        # Launch project on occupied ports — must fail with an error message.
        with occupy_port(cli_config.API_PORT):
            result = runner.invoke(cli.launch)
            assert result.exit_code != 0
            assert 'Error:' in result.output

    # Copy source files to test project directory
    shutil.copy(src='{}/info.txt'.format(datadir), dst=project_dir + '/app/assets/')
    shutil.copy(src='{}/requirements.txt'.format(datadir), dst=project_dir + '/requirements.txt')
    shutil.copy(src='{}/iris_svc.pkl'.format(datadir), dst=project_dir + '/app/assets/')
    shutil.copy(src='{}/pipeline.py'.format(datadir), dst=project_dir + '/app/logic/')

    try:
        with project_dir.as_cwd():
            # Launch project successfully
            result = runner.invoke(cli.launch)
            assert result.exit_code == 0

            # Wait till all are up — poll `status` until the output reports
            # at least 5 'UP' entries (presumably one per service — confirm).
            start_time = time.time()
            while True:
                result = runner.invoke(cli.status)
                assert result.exit_code == 0
                if str(result.output).count('UP') < 5:
                    time.sleep(2)
                else:
                    break  # All is up
                if time.time() - start_time > 180:
                    raise TimeoutError('Too long installation phase')

            # Check info endpoint
            response = requests.get('http://localhost:8000/info')
            assert response.status_code == 200
            assert 'For prediction' in response.text

            # Check prediction endpoint with two iris samples.
            data = {
                "callback_uri": "http://waithook.com/john_q",
                "data": {"a123": {"sepal-length": 4.6, "sepal-width": 3.6,
                                  "petal-length": 1.0, "petal-width": 0.2},
                         "b456": {"sepal-length": 6.5, "sepal-width": 3.2,
                                  "petal-length": 5.1, "petal-width": 2.0}}
            }
            response = requests.post('http://localhost:8000/predict', json=data)
            assert response.status_code == 200
            response_data = response.json()
            assert response_data['status'].lower() == 'success'
            task_id = response_data['data']['task_id']

            # Check status endpoint
            response = requests.get('http://localhost:8000/status/{}'.format(task_id))

            # Wait for prediction: poll up to ~10s while the task is pending.
            start_time = time.time()
            while response.json()['status'].lower() == 'pending' and time.time() - start_time < 10:
                response = requests.get('http://localhost:8000/status/{}'.format(task_id))
                time.sleep(1)

            # Verify both samples were classified as expected.
            response_data = response.json()
            assert response.status_code == 200
            assert response_data['status'].lower() == 'success'
            assert response_data['result']['a123'] == 'Iris-setosa'
            assert response_data['result']['b456'] == 'Iris-virginica'
    finally:
        # Always shut the project down so containers don't outlive the test.
        with project_dir.as_cwd():
            result = runner.invoke(cli.shutdown)
            assert result.exit_code == 0
def test_global_and_local_config_precedence(
    runner: CliRunner, tmpdir: LocalPath
) -> None:
    """
    Test precedence of configuration specified in the global config vs
    pyproject.toml vs --config-file.

    From low to high priority:

    - global config file
    - project-local pyproject.toml file
    - file specified by the --config-file flag, if any
    """
    toml_global = dedent(
        """
        [common_statements]
        "G" = "from g import G"
        "A" = "from ga import A"
        "B" = "from gb import B"
        "C" = "from gc import C"
        """
    )
    toml_pyproject = dedent(
        """
        [tool.autoimport.common_statements]
        "A" = "from pa import A"
        "C" = "from pc import C"
        "D" = "from pd import D"
        """
    )
    toml_local = dedent(
        """
        [common_statements]
        "B" = "from lb import B"
        "C" = "from lc import C"
        "D" = "from ld import D"
        """
    )

    source_path = tmpdir / "code.py"
    source_before = dedent(
        """
        A
        B
        C
        D
        G
        """
    )
    # Each name must be imported from the highest-priority config defining it.
    expected_imports = dedent(
        """\
        from pa import A
        from lb import B
        from lc import C
        from ld import D
        from g import G
        """
    )
    source_path.write(source_before)

    cli_args: List[str] = [str(source_path)]
    cli_env: Dict[str, Optional[str]] = {}

    # Global config under a fake XDG_CONFIG_HOME (must be an absolute path).
    xdg_home = tmpdir / "xdg_home"
    cli_env["XDG_CONFIG_HOME"] = str(Path(xdg_home).resolve())
    global_conf_file = xdg_home / "autoimport" / "config.toml"
    global_conf_file.ensure()
    global_conf_file.write(toml_global)

    # Local config passed via the --config-file flag.
    local_conf_file = tmpdir / "cfg" / "local.toml"
    local_conf_file.ensure()
    local_conf_file.write(toml_local)
    cli_args.extend(["--config-file", str(local_conf_file)])

    # Project-local pyproject.toml.
    (tmpdir / "pyproject.toml").write(toml_pyproject)

    with tmpdir.as_cwd():
        outcome = runner.invoke(cli, cli_args, env=cli_env)
        assert outcome.exit_code == 0
        assert source_path.read() == expected_imports + source_before