Example #1
def run_autofix_test(
    tmpdir: py.path.local,
    method: typing.Callable[[typing.List[str]], int],
    not_pretty_formatted_path: str,
    formatted_path: str,
) -> None:
    tmpdir.mkdir("src")
    not_pretty_formatted_tmp_path = tmpdir.join("src").join(
        basename(not_pretty_formatted_path))

    # Use a relative path because KTLint==0.41.0 dropped support for absolute paths
    not_pretty_formatted_tmp_strpath = str(
        tmpdir.bestrelpath(not_pretty_formatted_tmp_path))

    copyfile(not_pretty_formatted_path, not_pretty_formatted_tmp_path)
    with change_dir_context(tmpdir.strpath):
        parameters = ["--autofix", not_pretty_formatted_tmp_strpath]
        status_code = method(parameters)
        if status_code != 1:
            raise UnexpectedStatusCode(parameters=parameters,
                                       expected_status_code=1,
                                       actual_status_code=status_code)

    # file was formatted (shouldn't trigger linter again)
    with change_dir_context(tmpdir.strpath):
        parameters = ["--autofix", not_pretty_formatted_tmp_strpath]
        status_code = method(parameters)
        if status_code != 0:
            raise UnexpectedStatusCode(parameters=parameters,
                                       expected_status_code=0,
                                       actual_status_code=status_code)

    assert not_pretty_formatted_tmp_path.read_text("utf-8") == py.path.local(
        formatted_path).read_text("utf-8")
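A minimal usage sketch for this helper follows; the ktlint_main callable and the fixture paths are assumptions for illustration, not part of the project above.

# Hypothetical usage sketch: ktlint_main and the .kt fixture paths are assumed.
def test_ktlint_autofix(tmpdir: py.path.local) -> None:
    run_autofix_test(
        tmpdir,
        ktlint_main,  # assumed: takes a list of CLI args, returns an exit status
        "tests/data/not_pretty_formatted.kt",  # hypothetical unformatted input
        "tests/data/pretty_formatted.kt",      # hypothetical expected output
    )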
Example #2
def test_save_history_no_dir(model_name: str, tmpdir: py.path.local,
                             new_history: History):
    shutil.rmtree(tmpdir.join("models"))

    save_history(new_history, model_name)

    assert tmpdir.join("models").check(dir=1)
Example #3
def _check_initialpaths_for_relpath(session: "Session",
                                    fspath: py.path.local) -> Optional[str]:
    for initial_path in session._initialpaths:
        initial_path_ = py.path.local(initial_path)
        if fspath.common(initial_path_) == initial_path_:
            return fspath.relto(initial_path_)
    return None
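The helper relies on two py.path.local methods: common(), which returns the deepest shared ancestor, and relto(), which returns a relative path string (empty when the paths are unrelated). A small sketch of those semantics with made-up paths:

# Sketch of the py.path.local calls the helper depends on (paths are made up).
import os
import py

root = py.path.local("/project/tests")
fspath = py.path.local("/project/tests/unit/test_x.py")

assert fspath.common(root) == root                              # root is an ancestor
assert fspath.relto(root) == os.path.join("unit", "test_x.py")
assert fspath.relto(py.path.local("/elsewhere")) == ""          # unrelated -> empty string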
Example #4
def pytest_ignore_collect(path: py.path.local,
                          config: Config) -> "Optional[Literal[True]]":
    ignore_paths = config._getconftest_pathlist("collect_ignore",
                                                path=path.dirpath())
    ignore_paths = ignore_paths or []
    excludeopt = config.getoption("ignore")
    if excludeopt:
        ignore_paths.extend([py.path.local(x) for x in excludeopt])

    if py.path.local(path) in ignore_paths:
        return True

    ignore_globs = config._getconftest_pathlist("collect_ignore_glob",
                                                path=path.dirpath())
    ignore_globs = ignore_globs or []
    excludeglobopt = config.getoption("ignore_glob")
    if excludeglobopt:
        ignore_globs.extend([py.path.local(x) for x in excludeglobopt])

    if any(fnmatch.fnmatch(str(path), str(glob)) for glob in ignore_globs):
        return True

    allow_in_venv = config.getoption("collect_in_virtualenv")
    if not allow_in_venv and _in_venv(path):
        return True
    return None
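The hook merges the conftest-level ignore lists with the --ignore and --ignore-glob command-line options. A minimal conftest.py sketch of the two variables it reads (the file names are placeholders):

# conftest.py -- example values only
collect_ignore = ["setup.py", "broken/old_test.py"]  # exact paths, relative to this conftest
collect_ignore_glob = ["*_wip.py"]                   # fnmatch-style patterns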
Example #5
def test_run(tmpdir: py.path.local) -> None:
    pairs = ['GBPUSD', 'USDJPY']
    result_q: 'Queue[Result]' = Queue()
    equity_file = tmpdir.join('Equity.csv')
    execution_file = tmpdir.join('Execution.csv')
    equity_result = EquityResult(
        pd.Timestamp('2020-07-15 22:18:23'), Decimal('111.1'),
        Decimal('2222.22'), {
            'total': Decimal('33.333'),
            'GBPUSD': Decimal('4.4444'),
            'USDJPY': Decimal('5.55555')
        })
    execution_result = ExecutionResult(pd.Timestamp('2020-07-14 22:20:00'),
                                       'USDJPY', Decimal('2.22'),
                                       Decimal('99.9'))

    frh = FileResultHandler(pairs, result_q, tmpdir)
    result_q.put(equity_result)
    result_q.put(execution_result)
    result_q.put(None)
    frh.run()

    equity_file_result = equity_file.readlines()
    execution_file_result = execution_file.readlines()
    assert equity_file_result[0] == 'Timestamp,Equity,Balance,UPL[Total],' + \
        'UPL[GBPUSD],UPL[USDJPY]\n'
    assert equity_file_result[1] == \
        '2020-07-15 22:18:23,111.1,2222.22,33.333,4.4444,5.55555\n'
    assert execution_file_result[0] == 'Timestamp,Pair,Units,Price\n'
    assert execution_file_result[1] == '2020-07-14 22:20:00,USDJPY,2.22,99.9\n'
Example #6
    def _collectfile(
        self, path: py.path.local, handle_dupes: bool = True
    ) -> Sequence[nodes.Collector]:
        fspath = Path(path)
        assert (
            path.isfile()
        ), "{!r} is not a file (isdir={!r}, exists={!r}, islink={!r})".format(
            path, path.isdir(), path.exists(), path.islink()
        )
        ihook = self.gethookproxy(path)
        if not self.isinitpath(path):
            if ihook.pytest_ignore_collect(
                fspath=fspath, path=path, config=self.config
            ):
                return ()

        if handle_dupes:
            keepduplicates = self.config.getoption("keepduplicates")
            if not keepduplicates:
                duplicate_paths = self.config.pluginmanager._duplicatepaths
                if path in duplicate_paths:
                    return ()
                else:
                    duplicate_paths.add(path)

        return ihook.pytest_collect_file(fspath=fspath, path=path, parent=self)  # type: ignore[no-any-return]
Example #7
def test_save_model_no_dir(model_name: str, tmpdir: py.path.local,
                           fake_model: Model):
    shutil.rmtree(tmpdir.join("models"))

    fake_model.__asf_model_name = f"{model_name}:some_old_tag"
    save_model(fake_model, "test_save_model")

    assert tmpdir.join("models").check(dir=1)
Example #8
    def _try_load_conftest(
        self, anchor: py.path.local, importmode: Union[str, ImportMode]
    ) -> None:
        self._getconftestmodules(anchor, importmode)
        # let's also consider test* subdirs
        if anchor.check(dir=1):
            for x in anchor.listdir("test*"):
                if x.check(dir=1):
                    self._getconftestmodules(x, importmode)
Example #9
def test_find_all_md_files_only_returns_md_files(tmpdir: py.path.local):
    files_to_make = 5
    # Create some markdown and non markdown files
    for i in range(0, files_to_make):
        file = tmpdir.join(f'test{i}.md')
        file.write('# Heading 1')
    not_a_md_file = tmpdir.join('notmd.html')
    not_a_md_file.write('<h1>no markdown here</h1>')

    found_files = find_all_md_files(tmpdir.strpath)

    assert len(found_files) == files_to_make
Example #10
    def _recurse(self, dirpath: py.path.local) -> bool:
        if dirpath.basename == "__pycache__":
            return False
        ihook = self._gethookproxy(dirpath.dirpath())
        if ihook.pytest_ignore_collect(path=dirpath, config=self.config):
            return False
        for pat in self._norecursepatterns:
            if dirpath.check(fnmatch=pat):
                return False
        ihook = self._gethookproxy(dirpath)
        ihook.pytest_collect_directory(path=dirpath, parent=self)
        return True
Example #11
def test_module_add_preinstalled(tmpdir: py.path.local, modules_v2_dir: str, snippetcompiler_clean, caplog) -> None:
    """
    Verify that `inmanta module add` respects preinstalled modules when they're compatible and logs a warning when they're
    not.
    """
    module_name: str = "mymodule"
    pip_index = PipIndex(artifact_dir=str(tmpdir.join("pip-index")))
    snippetcompiler_clean.setup_for_snippet(snippet="", autostd=False, python_package_sources=[pip_index.url])

    # preinstall 1.0.0, don't publish to index
    module_from_template(
        os.path.join(modules_v2_dir, "minimalv2module"),
        str(tmpdir.join(module_name, "1.0.0")),
        new_name=module_name,
        new_version=Version("1.0.0"),
        install=True,
    )
    # publish 1.1.0 and 2.0.0 to index
    module_from_template(
        os.path.join(modules_v2_dir, "minimalv2module"),
        str(tmpdir.join(module_name, "1.1.0")),
        new_name=module_name,
        new_version=Version("1.1.0"),
        install=False,
        publish_index=pip_index,
    )
    module_from_template(
        os.path.join(modules_v2_dir, "minimalv2module"),
        str(tmpdir.join(module_name, "2.0.0")),
        new_name=module_name,
        new_version=Version("2.0.0"),
        install=False,
        publish_index=pip_index,
    )

    # verify that compatible constraint does not reinstall or update
    ModuleTool().add(module_req=f"{module_name}~=1.0", v2=True, override=True)
    caplog.clear()
    with caplog.at_level(logging.WARNING):
        assert ModuleTool().get_module(module_name).version == Version("1.0.0")
        assert "does not match constraint" not in caplog.text

    # verify that incompatible constraint does reinstall and logs a warning
    with caplog.at_level(logging.WARNING):
        ModuleTool().add(module_req=f"{module_name}~=2.0", v2=True, override=True)
        assert (
            f"Currently installed {module_name}-1.0.0 does not match constraint ~=2.0: updating to compatible version."
            in caplog.messages
        )
    assert ModuleTool().get_module(module_name).version == Version("2.0.0")
Example #12
def config_file(tmpdir: py.path.local, case):
    """Prepares Bioconda config.yaml"""
    if 'add_root_files' in case:
        for fname, data in case['add_root_files'].items():
            with tmpdir.join(fname).open('w') as fdes:
                fdes.write(data)

    data = deepcopy(TEST_CONFIG_YAML)
    if 'config' in case:
        dict_merge(data, case['config'])
    config_fname = tmpdir.join(TEST_CONFIG_YAML_FNAME)
    with config_fname.open('w') as fdes:
        yaml.dump(data, fdes)

    yield config_fname
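A sketch of a test consuming this fixture; it only checks that the written YAML loads back into a mapping, since the actual keys depend on TEST_CONFIG_YAML and the case parameter:

# Hypothetical consumer of the config_file fixture.
def test_config_file_is_valid_yaml(config_file) -> None:
    with config_file.open() as fdes:
        data = yaml.safe_load(fdes)
    assert isinstance(data, dict)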
Example #13
def test_init_(tmpdir: py.path.local) -> None:
    pairs = ['GBPUSD', 'USDJPY']
    result_q: 'Queue[Result]' = Queue()
    equity_file = tmpdir.join('Equity.csv')
    execution_file = tmpdir.join('Execution.csv')

    frh = FileResultHandler(pairs, result_q, tmpdir)
    frh._close()

    assert os.path.isfile(equity_file)
    assert os.path.isfile(execution_file)

    assert equity_file.read() == 'Timestamp,Equity,Balance,UPL[Total],' + \
        'UPL[GBPUSD],UPL[USDJPY]\n'
    assert execution_file.read() == 'Timestamp,Pair,Units,Price\n'
Example #14
def test_3322_module_install_deep_data_files(tmpdir: py.path.local,
                                             snippetcompiler_clean,
                                             modules_v2_dir: str) -> None:
    """
    Verify that module installation includes data files regardless of depth in the directory structure.
    """
    # set up module directory
    module_name: str = "minimalv2module"
    module_path: str = str(tmpdir.join(module_name))
    module_from_template(
        os.path.join(modules_v2_dir, module_name),
        module_path,
    )
    deep_model_file_rel: str = os.path.join(
        "model",
        *(str(i) for i in range(10)),
        "mymod.cf",
    )
    os.makedirs(os.path.join(module_path,
                             os.path.dirname(deep_model_file_rel)))
    open(os.path.join(module_path, deep_model_file_rel), "w").close()

    # set up simple project and activate snippetcompiler venv
    snippetcompiler_clean.setup_for_snippet("")

    # install module: non-editable mode
    ModuleTool().install(editable=False, path=module_path)

    assert os.path.exists(
        os.path.join(
            env.process_env.site_packages_dir,
            const.PLUGINS_PACKAGE,
            module_name,
            deep_model_file_rel,
        ))
Example #15
def test_pattern_is_found_correctly(
    tmpdir: py.path.local,
    file_contents: str,
    pattern: str,
    n_matches: int,
):

    file = tmpdir.join('test.md')
    file.write(file_contents)

    search_mode = SearchMode()
    args = {
        'pattern': pattern,
        'path': file.strpath,
        'tag_str': None,
        'topic': None,
        'case_insensitive': False,
        'title': None,
        'full_path': False,
    }
    options: ModeOptions = {
        'visual': True,
        'args': args,
    }
    search_mode.start(options)

    c = 0
    for match in search_mode.matches:
        for _ in match['matched_lines']:
            c += 1

    assert c == n_matches
Example #16
def test_search_file_subject_is_found_as_topic(tmpdir: py.path.local):

    file = tmpdir.join('test.md')
    file.write('''---
                  subject: test topic
                  ---
                  # Heading 1
                  search term
               ''')

    search_mode = SearchMode()
    args = {
        'pattern': 'search term',
        'path': file.strpath,
        'tag_str': None,
        'topic': 'test topic',
        'case_insensitive': False,
        'title': None,
        'full_path': False,
    }
    options: ModeOptions = {
        'visual': True,
        'args': args,
    }
    search_mode.start(options)
    assert len(search_mode.matches) == 1
Example #17
def test_update_dependencies_with_valid_path(
        tmpdir: py.path.local,
        package: GreatExpectationsContribPackageManifest):
    requirements_file = tmpdir.mkdir("tmp").join("requirements.txt")
    contents = """
altair>=4.0.0,<5  # package
Click>=7.1.2  # package
mistune>=0.8.4,<2.0.0  # package
numpy>=1.14.1  # package
ruamel.yaml>=0.16,<0.17.18  # package
    """
    requirements_file.write(contents)

    package._update_dependencies(str(requirements_file))
    assert package.dependencies == [
        Dependency(text="altair",
                   link="https://pypi.org/project/altair",
                   version="<5, >=4.0.0"),
        Dependency(text="Click",
                   link="https://pypi.org/project/Click",
                   version=">=7.1.2"),
        Dependency(
            text="mistune",
            link="https://pypi.org/project/mistune",
            version="<2.0.0, >=0.8.4",
        ),
        Dependency(text="numpy",
                   link="https://pypi.org/project/numpy",
                   version=">=1.14.1"),
        Dependency(
            text="ruamel.yaml",
            link="https://pypi.org/project/ruamel.yaml",
            version="<0.17.18, >=0.16",
        ),
    ]
Example #18
def config(tmpdir: py.path.local) -> SDConfig:
    '''Clone the module so we can modify it per test.'''

    cnf = SDConfig()

    data = tmpdir.mkdir('data')
    keys = data.mkdir('keys')
    os.chmod(str(keys), 0o700)
    store = data.mkdir('store')
    tmp = data.mkdir('tmp')
    sqlite = data.join('db.sqlite')

    # GPG 2.1+ requires gpg-agent, see #4013
    gpg_agent_config = str(keys.join('gpg-agent.conf'))
    with open(gpg_agent_config, 'w+') as f:
        f.write('allow-loopback-pinentry')

    gpg = gnupg.GPG('gpg2', homedir=str(keys))
    for ext in ['sec', 'pub']:
        file_path = path.join(path.dirname(__file__), 'files',
                              'test_journalist_key.{}'.format(ext))
        with open(file_path) as f:
            gpg.import_keys(f.read())

    cnf.SECUREDROP_DATA_ROOT = str(data)
    cnf.GPG_KEY_DIR = str(keys)
    cnf.STORE_DIR = str(store)
    cnf.TEMP_DIR = str(tmp)
    cnf.DATABASE_FILE = str(sqlite)

    # create the db file
    subprocess.check_call(['sqlite3', cnf.DATABASE_FILE, '.databases'])

    return cnf
Example #19
def test_search_file_finds_pattern_but_not_title(tmpdir: py.path.local):

    file = tmpdir.join('test.md')
    file.write('''---
                  title: Essay
                  ---
                  # Heading 1
                  search term
               ''')

    search_mode = SearchMode()
    args = {
        'pattern': 'search term',
        'path': file.strpath,
        'tag_str': None,
        'topic': None,
        'case_insensitive': False,
        'title': 'not in the doc',
        'full_path': False,
    }
    options: ModeOptions = {
        'visual': True,
        'args': args,
    }
    search_mode.start(options)
    assert len(search_mode.matches) == 0
Example #20
def source(request: SubRequest,
           tmpdir: py.path.local) -> typing.Iterator[Source]:
    doc = request.param
    with tmpdir.as_cwd():

        skip = doc.get("skip", False)
        if skip:
            pytest.skip(f"skip set: {skip}")

        # If this is a string, wrap it into a source
        if "path" in doc:
            path = doc["path"]
            if ":" not in path:
                # All files without a protocol should be taken relative to CWD
                _ = (pathlib.Path(f"{request.config.invocation_dir}") /
                     path).resolve()
                filename = str(_)
            else:
                filename = path
            yield FileSource(filename)

        else:
            script = doc["script"]
            script = f"{request.config.invocation_dir}/scripts/{script}"
            args = doc.get("args", [])
            cmd = [script] + args

            # If a connection is defined, then this is a background process
            if "connection" in doc:
                conn = doc["connection"]
                yield ProcessSource(cmd, conn, tmpdir)

            # Otherwise, run the generator which will produce a string
            else:
                yield GeneratedSource(cmd, tmpdir)
Example #21
def test_canonical_dataarray_to_grib_with_grib_keys(
    canonic_da: xr.DataArray, tmpdir: py.path.local
) -> None:
    out_path = tmpdir.join("res.grib")
    grib_keys = {"gridType": "regular_ll"}
    with open(str(out_path), "wb") as file:
        xarray_to_grib.canonical_dataarray_to_grib(canonic_da, file, grib_keys=grib_keys)
Example #22
def test_canonical_dataset_to_grib(canonic_da: xr.DataArray, tmpdir: py.path.local) -> None:
    out_path = tmpdir.join("res.grib")
    canonic_ds = canonic_da.to_dataset(name="t")
    with pytest.warns(FutureWarning):
        xarray_to_grib.canonical_dataset_to_grib(canonic_ds, str(out_path))

    xarray_to_grib.canonical_dataset_to_grib(canonic_ds, str(out_path), no_warn=True)
Example #23
def _folded_skips(
    startdir: py.path.local,
    skipped: Sequence[CollectReport],
) -> List[Tuple[int, str, Optional[int], str]]:
    d = {}  # type: Dict[Tuple[str, Optional[int], str], List[CollectReport]]
    for event in skipped:
        assert event.longrepr is not None
        assert len(event.longrepr) == 3, (event, event.longrepr)
        fspath, lineno, reason = event.longrepr
        # For consistency, report all fspaths in relative form.
        fspath = startdir.bestrelpath(py.path.local(fspath))
        keywords = getattr(event, "keywords", {})
        # folding reports with global pytestmark variable
        # This is a workaround because, for now, we cannot identify the scope of a skip marker.
        # TODO: revisit once the scope of marks is fixed.
        if (event.when == "setup" and "skip" in keywords
                and "pytestmark" not in keywords):
            key = (fspath, None, reason
                   )  # type: Tuple[str, Optional[int], str]
        else:
            key = (fspath, lineno, reason)
        d.setdefault(key, []).append(event)
    values = []  # type: List[Tuple[int, str, Optional[int], str]]
    for key, events in d.items():
        values.append((len(events), *key))
    return values
Example #24
def test_disabled_errors_are_parsed_from_config_file(tmpdir: py.path.local):
    """Test that when an error is disbled in the config file it gets parsed
       correctly

    TODO: parametrize test

    """
    config_file = tmpdir.join('config_file.toml')
    config_file.write("""
[check]
disable_math_error = true
                      """)

    c = Config()
    opts = c.parse(
        [f'--config-file={config_file.strpath}', 'check', 'some_docs'], )

    assert 'disabled_errors' in opts['check']
    assert len(opts['check']['disabled_errors']) == len(ALL_ERRORS)

    found = False
    for error in opts['check']['disabled_errors']:
        if error['dest'] == 'd-math-error':
            found = True
            assert error['value'] == True
            break

    # Make sure that the error was actually present
    assert found
Example #25
    def __init__(self,
                 fspath: py.path.local,
                 parent=None,
                 config=None,
                 session=None,
                 nodeid=None) -> None:
        name = fspath.basename
        if parent is not None:
            rel = fspath.relto(parent.fspath)
            if rel:
                name = rel
            name = name.replace(os.sep, SEP)
        self.fspath = fspath

        session = session or parent.session

        if nodeid is None:
            nodeid = self.fspath.relto(session.config.rootdir)

            if not nodeid:
                nodeid = _check_initialpaths_for_relpath(session, fspath)
            if nodeid and os.sep != SEP:
                nodeid = nodeid.replace(os.sep, SEP)

        super().__init__(name,
                         parent,
                         config,
                         session,
                         nodeid=nodeid,
                         fspath=fspath)

        self._norecursepatterns = self.config.getini("norecursedirs")
Example #26
    def __init__(
        self,
        fspath: py.path.local,
        parent=None,
        config: Optional[Config] = None,
        session: Optional["Session"] = None,
        nodeid: Optional[str] = None,
    ) -> None:
        name = fspath.basename
        if parent is not None:
            rel = fspath.relto(parent.fspath)
            if rel:
                name = rel
            name = name.replace(os.sep, SEP)
        self.fspath = fspath

        session = session or parent.session

        if nodeid is None:
            nodeid = self.fspath.relto(session.config.rootdir)

            if not nodeid:
                nodeid = _check_initialpaths_for_relpath(session, fspath)
            if nodeid and os.sep != SEP:
                nodeid = nodeid.replace(os.sep, SEP)

        super().__init__(name,
                         parent,
                         config,
                         session,
                         nodeid=nodeid,
                         fspath=fspath)
Example #27
def test_search_file_pattern_and_title_is_found(tmpdir: py.path.local):

    file = tmpdir.join('test.md')
    file.write('''---
                  title: Essay
                  ---
                  # Heading 1
                  search term
               ''')

    search_mode = SearchMode()
    args = {
        'pattern': 'search term',
        'path': file.strpath,
        'tag_str': None,
        'topic': None,
        'case_insensitive': False,
        'title': 'essay',  # Note: is lowercase but should still be found
        'full_path': False,
    }
    options: ModeOptions = {
        'visual': True,
        'args': args,
    }
    search_mode.start(options)
    assert len(search_mode.matches) == 1
Example #28
def tmp_home_factory(tmpdir: py.path.local):
    fake_home_dir = str(tmpdir.mkdir('home'))

    def tmp_home():
        return Path(fake_home_dir)

    return tmp_home
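Assuming tmp_home_factory is registered as a pytest fixture (the decorator is not shown above), it could be combined with monkeypatch to redirect Path.home(); patching pathlib.Path.home here is an illustration, not necessarily how the original suite uses it:

# Hypothetical usage with monkeypatch; patching Path.home is an assumption.
import pathlib

def test_uses_fake_home(tmp_home_factory, monkeypatch) -> None:
    monkeypatch.setattr(pathlib.Path, "home", tmp_home_factory)
    assert pathlib.Path.home().name == "home"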
Example #29
def model_name(tmpdir: py.path.local):
    model = "unittest_model"
    temp_model_dir = tmpdir.mkdir("models")
    shutil.copytree("tests/data/models/sample_model",
                    temp_model_dir.join(model))
    with mock.patch("src.model.MODELS_DIR", temp_model_dir):
        yield model
Example #30
def fill_mock_conf(conf: T.Dict[T.Text, T.Any],
                   tmpdir: py.path.local) -> T.Text:
    base_dir = tmpdir.mkdir("conf")
    conf_path = base_dir.join("kofi.yml")
    with open(conf_path, "w") as f:
        f.write(yaml.dump(conf))

    return str(conf_path)
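A round-trip sketch for this helper; the configuration keys are invented for illustration:

# Hypothetical round-trip check; the config keys are invented.
def test_fill_mock_conf_round_trip(tmpdir: py.path.local) -> None:
    conf = {"host": "localhost", "port": 8080}
    conf_path = fill_mock_conf(conf, tmpdir)
    with open(conf_path) as f:
        assert yaml.safe_load(f) == conf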