示例#1
0
def test_run(tmpdir: py.path.local) -> None:
    """Feed one equity and one execution result through the handler and
    verify both CSV files contain the expected header and data rows."""
    pairs = ['GBPUSD', 'USDJPY']
    queue: 'Queue[Result]' = Queue()
    equity_csv = tmpdir.join('Equity.csv')
    execution_csv = tmpdir.join('Execution.csv')

    upl = {
        'total': Decimal('33.333'),
        'GBPUSD': Decimal('4.4444'),
        'USDJPY': Decimal('5.55555'),
    }
    equity = EquityResult(pd.Timestamp('2020-07-15 22:18:23'),
                          Decimal('111.1'), Decimal('2222.22'), upl)
    execution = ExecutionResult(pd.Timestamp('2020-07-14 22:20:00'),
                                'USDJPY', Decimal('2.22'), Decimal('99.9'))

    handler = FileResultHandler(pairs, queue, tmpdir)
    # The trailing None acts as the shutdown sentinel for run().
    for item in (equity, execution, None):
        queue.put(item)
    handler.run()

    equity_lines = equity_csv.readlines()
    execution_lines = execution_csv.readlines()
    assert equity_lines[0] == ('Timestamp,Equity,Balance,UPL[Total],'
                               'UPL[GBPUSD],UPL[USDJPY]\n')
    assert equity_lines[1] == \
        '2020-07-15 22:18:23,111.1,2222.22,33.333,4.4444,5.55555\n'
    assert execution_lines[0] == 'Timestamp,Pair,Units,Price\n'
    assert execution_lines[1] == '2020-07-14 22:20:00,USDJPY,2.22,99.9\n'
示例#2
0
def test_save_history_no_dir(model_name: str, tmpdir: py.path.local,
                             new_history: History):
    """save_history must recreate the models directory when it is missing."""
    models_dir = tmpdir.join("models")
    shutil.rmtree(models_dir)

    save_history(new_history, model_name)

    assert models_dir.check(dir=1)
示例#3
0
def test_save_model_no_dir(model_name: str, tmpdir: py.path.local,
                           fake_model: Model):
    # save_model must recreate the "models" output directory when missing.
    shutil.rmtree(tmpdir.join("models"))

    # NOTE(review): assigning `__asf_model_name` outside a class body is NOT
    # name-mangled, so this sets the attribute literally. This only lines up
    # with the library if it also reads/writes the name unmangled — confirm
    # against save_model's implementation.
    fake_model.__asf_model_name = f"{model_name}:some_old_tag"
    save_model(fake_model, "test_save_model")

    assert tmpdir.join("models").check(dir=1)
示例#4
0
def test_find_all_md_files_only_returns_md_files(tmpdir: py.path.local):
    """find_all_md_files must return only markdown files and skip others."""
    files_to_make = 5
    # Create some markdown and non markdown files
    for i in range(files_to_make):  # range(0, n) is just range(n)
        tmpdir.join(f'test{i}.md').write('# Heading 1')
    not_a_md_file = tmpdir.join('notmd.html')
    not_a_md_file.write('<h1>no markdown here</h1>')

    found_files = find_all_md_files(tmpdir.strpath)

    # Only the .md files are counted; the .html file must be excluded.
    assert len(found_files) == files_to_make
示例#5
0
def test_module_add_preinstalled(tmpdir: py.path.local, modules_v2_dir: str, snippetcompiler_clean, caplog) -> None:
    """
    Verify that `inmanta module add` respects preinstalled modules when they're compatible and logs a warning when they're
    not.
    """
    module_name: str = "mymodule"
    # Local pip index so no real package repository is contacted.
    pip_index = PipIndex(artifact_dir=str(tmpdir.join("pip-index")))
    snippetcompiler_clean.setup_for_snippet(snippet="", autostd=False, python_package_sources=[pip_index.url])

    # preinstall 1.0.0, don't publish to index
    module_from_template(
        os.path.join(modules_v2_dir, "minimalv2module"),
        str(tmpdir.join(module_name, "1.0.0")),
        new_name=module_name,
        new_version=Version("1.0.0"),
        install=True,
    )
    # publish 1.1.0 and 2.0.0 to index
    module_from_template(
        os.path.join(modules_v2_dir, "minimalv2module"),
        str(tmpdir.join(module_name, "1.1.0")),
        new_name=module_name,
        new_version=Version("1.1.0"),
        install=False,
        publish_index=pip_index,
    )
    module_from_template(
        os.path.join(modules_v2_dir, "minimalv2module"),
        str(tmpdir.join(module_name, "2.0.0")),
        new_name=module_name,
        new_version=Version("2.0.0"),
        install=False,
        publish_index=pip_index,
    )

    # verify that compatible constraint does not reinstall or update
    # (1.1.0 exists on the index but the preinstalled 1.0.0 satisfies ~=1.0)
    ModuleTool().add(module_req=f"{module_name}~=1.0", v2=True, override=True)
    caplog.clear()
    with caplog.at_level(logging.WARNING):
        assert ModuleTool().get_module(module_name).version == Version("1.0.0")
        assert "does not match constraint" not in caplog.text

    # verify that incompatible constraint does reinstall and logs a warning
    with caplog.at_level(logging.WARNING):
        ModuleTool().add(module_req=f"{module_name}~=2.0", v2=True, override=True)
        assert (
            f"Currently installed {module_name}-1.0.0 does not match constraint ~=2.0: updating to compatible version."
            in caplog.messages
        )
    assert ModuleTool().get_module(module_name).version == Version("2.0.0")
示例#6
0
def test_init_(tmpdir: py.path.local) -> None:
    """Creating and closing the handler must produce both CSVs with headers."""
    result_q: 'Queue[Result]' = Queue()
    equity_file = tmpdir.join('Equity.csv')
    execution_file = tmpdir.join('Execution.csv')

    handler = FileResultHandler(['GBPUSD', 'USDJPY'], result_q, tmpdir)
    handler._close()

    assert os.path.isfile(equity_file)
    assert os.path.isfile(execution_file)

    expected_equity_header = ('Timestamp,Equity,Balance,UPL[Total],'
                              'UPL[GBPUSD],UPL[USDJPY]\n')
    assert equity_file.read() == expected_equity_header
    assert execution_file.read() == 'Timestamp,Pair,Units,Price\n'
示例#7
0
def config_file(tmpdir: py.path.local, case):
    """Prepares Bioconda config.yaml"""
    # Materialise any extra root-level files the case asks for.
    root_files = case['add_root_files'] if 'add_root_files' in case else {}
    for fname, contents in root_files.items():
        with tmpdir.join(fname).open('w') as handle:
            handle.write(contents)

    # Start from the baseline config and overlay case-specific settings.
    merged = deepcopy(TEST_CONFIG_YAML)
    if 'config' in case:
        dict_merge(merged, case['config'])
    config_path = tmpdir.join(TEST_CONFIG_YAML_FNAME)
    with config_path.open('w') as handle:
        yaml.dump(merged, handle)

    yield config_path
示例#8
0
def _run_with_expected_status(
    method: typing.Callable[[typing.List[str]], int],
    parameters: typing.List[str],
    expected_status_code: int,
) -> None:
    """Invoke *method* and raise UnexpectedStatusCode unless it returns the expected code."""
    status_code = method(parameters)
    if status_code != expected_status_code:
        raise UnexpectedStatusCode(parameters=parameters,
                                   expected_status_code=expected_status_code,
                                   actual_status_code=status_code)


def run_autofix_test(
    tmpdir: py.path.local,
    method: typing.Callable[[typing.List[str]], int],
    not_pretty_formatted_path: str,
    formatted_path: str,
) -> None:
    """Verify that running *method* with --autofix reformats a file in place.

    The first run must report a fix (status 1); the second run must find
    nothing left to fix (status 0), and the result must match *formatted_path*.
    """
    tmpdir.mkdir("src")
    not_pretty_formatted_tmp_path = tmpdir.join("src").join(
        basename(not_pretty_formatted_path))

    # It is a relative paths as KTLint==0.41.0 dropped support for absolute paths
    not_pretty_formatted_tmp_strpath = str(
        tmpdir.bestrelpath(not_pretty_formatted_tmp_path))

    copyfile(not_pretty_formatted_path, not_pretty_formatted_tmp_path)
    parameters = ["--autofix", not_pretty_formatted_tmp_strpath]
    with change_dir_context(tmpdir.strpath):
        _run_with_expected_status(method, parameters, 1)

    # file was formatted (shouldn't trigger linter again)
    with change_dir_context(tmpdir.strpath):
        _run_with_expected_status(method, parameters, 0)

    assert not_pretty_formatted_tmp_path.read_text("utf-8") == py.path.local(
        formatted_path).read_text("utf-8")
示例#9
0
def test_3322_module_install_deep_data_files(tmpdir: py.path.local,
                                             snippetcompiler_clean,
                                             modules_v2_dir: str) -> None:
    """
    Verify that module installation includes data files regardless of depth in the directory structure.
    """
    # set up module directory
    module_name: str = "minimalv2module"
    module_path: str = str(tmpdir.join(module_name))
    module_from_template(
        os.path.join(modules_v2_dir, module_name),
        module_path,
    )
    # A model file buried ten directories deep: model/0/1/.../9/mymod.cf
    nested_dirs = [str(i) for i in range(10)]
    deep_model_file_rel: str = os.path.join("model", *nested_dirs, "mymod.cf")
    os.makedirs(os.path.join(module_path, "model", *nested_dirs))
    open(os.path.join(module_path, deep_model_file_rel), "w").close()

    # set up simple project and activate snippetcompiler venv
    snippetcompiler_clean.setup_for_snippet("")

    # install module: non-editable mode
    ModuleTool().install(editable=False, path=module_path)

    installed_file = os.path.join(
        env.process_env.site_packages_dir,
        const.PLUGINS_PACKAGE,
        module_name,
        deep_model_file_rel,
    )
    assert os.path.exists(installed_file)
示例#10
0
def test_search_file_subject_is_found_as_topic(tmpdir: py.path.local):
    """A document's `subject` front-matter field must be matched by the topic filter."""
    note = tmpdir.join('test.md')
    note.write('''---
                  subject: test topic
                  ---
                  # Heading 1
                  search term
               ''')

    search_mode = SearchMode()
    options: ModeOptions = {
        'visual': True,
        'args': {
            'pattern': 'search term',
            'path': note.strpath,
            'tag_str': None,
            'topic': 'test topic',
            'case_insensitive': False,
            'title': None,
            'full_path': False,
        },
    }
    search_mode.start(options)
    assert len(search_mode.matches) == 1
示例#11
0
def test_canonical_dataarray_to_grib_with_grib_keys(
    canonic_da: xr.DataArray, tmpdir: py.path.local
) -> None:
    """Writing with an explicit, compatible grib key must succeed."""
    target = tmpdir.join("res.grib")
    with open(str(target), "wb") as stream:
        xarray_to_grib.canonical_dataarray_to_grib(
            canonic_da, stream, grib_keys={"gridType": "regular_ll"})
示例#12
0
def test_disabled_errors_are_parsed_from_config_file(tmpdir: py.path.local):
    """Test that when an error is disabled in the config file it gets parsed
       correctly

    TODO: parametrize test

    """
    config_file = tmpdir.join('config_file.toml')
    config_file.write("""
[check]
disable_math_error = true
                      """)

    c = Config()
    opts = c.parse(
        [f'--config-file={config_file.strpath}', 'check', 'some_docs'], )

    assert 'disabled_errors' in opts['check']
    assert len(opts['check']['disabled_errors']) == len(ALL_ERRORS)

    # Find the math error entry; next() replaces the manual found-flag loop
    # and makes sure the error was actually present.
    disabled = next((error for error in opts['check']['disabled_errors']
                     if error['dest'] == 'd-math-error'), None)
    assert disabled is not None
    assert disabled['value'] is True
示例#13
0
def test_search_file_pattern_and_title_is_found(tmpdir: py.path.local):
    """The title filter must match the front-matter title even when lowercased."""
    note = tmpdir.join('test.md')
    note.write('''---
                  title: Essay
                  ---
                  # Heading 1
                  search term
               ''')

    search_mode = SearchMode()
    options: ModeOptions = {
        'visual': True,
        'args': {
            'pattern': 'search term',
            'path': note.strpath,
            'tag_str': None,
            'topic': None,
            'case_insensitive': False,
            'title': 'essay',  # Note: is lowercase but should still be found
            'full_path': False,
        },
    }
    search_mode.start(options)
    assert len(search_mode.matches) == 1
示例#14
0
def test_canonical_dataset_to_grib(canonic_da: xr.DataArray, tmpdir: py.path.local) -> None:
    """Dataset writing warns with FutureWarning unless no_warn is passed."""
    target = str(tmpdir.join("res.grib"))
    dataset = canonic_da.to_dataset(name="t")

    with pytest.warns(FutureWarning):
        xarray_to_grib.canonical_dataset_to_grib(dataset, target)

    # Explicit opt-out must not warn.
    xarray_to_grib.canonical_dataset_to_grib(dataset, target, no_warn=True)
示例#15
0
def test_search_file_finds_pattern_but_not_title(tmpdir: py.path.local):
    """A pattern hit must be discarded when the title filter does not match."""
    note = tmpdir.join('test.md')
    note.write('''---
                  title: Essay
                  ---
                  # Heading 1
                  search term
               ''')

    search_mode = SearchMode()
    options: ModeOptions = {
        'visual': True,
        'args': {
            'pattern': 'search term',
            'path': note.strpath,
            'tag_str': None,
            'topic': None,
            'case_insensitive': False,
            'title': 'not in the doc',
            'full_path': False,
        },
    }
    search_mode.start(options)
    assert len(search_mode.matches) == 0
示例#16
0
def test_pattern_is_found_correctly(
    tmpdir: py.path.local,
    file_contents: str,
    pattern: str,
    n_matches: int,
):
    """The total number of matched lines across all matches must equal n_matches."""
    file = tmpdir.join('test.md')
    file.write(file_contents)

    search_mode = SearchMode()
    args = {
        'pattern': pattern,
        'path': file.strpath,
        'tag_str': None,
        'topic': None,
        'case_insensitive': False,
        'title': None,
        'full_path': False,
    }
    options: ModeOptions = {
        'visual': True,
        'args': args,
    }
    search_mode.start(options)

    # sum() over matched-line counts replaces the manual nested counter loop.
    total = sum(len(match['matched_lines']) for match in search_mode.matches)
    assert total == n_matches
示例#17
0
def test_canonical_dataarray_to_grib_conflicting_detect_grib_keys(
    canonic_da: xr.DataArray, tmpdir: py.path.local
) -> None:
    """A grib key that conflicts with the detected keys must raise ValueError."""
    target = tmpdir.join("res.grib")
    with open(str(target), "wb") as stream, pytest.raises(ValueError):
        xarray_to_grib.canonical_dataarray_to_grib(
            canonic_da, stream, grib_keys={"gridType": "reduced_ll"})
示例#18
0
def save_figure(request: _pytest.fixtures.FixtureRequest,
                tmpdir: py.path.local) -> Generator[None, None, None]:
    """
    Save a matplotlib figure when "--savefig" is enabled.
    """
    yield
    # Guard clauses: only save when the flag is set and a figure exists.
    if not request.config.getoption("--savefig"):
        return
    if plt.get_fignums():
        plt.gcf().savefig(tmpdir.join("test.png"))
示例#19
0
def test(monkeypatch: MonkeyPatch, tmpdir: py.path.local, tail: str,
         expected_error: str):
    """Test valid and invalid values."""
    # Base conf.py is formatted with the path two directories up, then the
    # parametrized `tail` is appended to exercise one specific config value.
    tmpdir.join("conf.py").write(
        BASE_CONFIG.format(py.path.local(__file__).join("..", "..")))
    tmpdir.join("conf.py").write(tail, mode="a")
    tmpdir.join("index.rst").write(
        "====\nMain\n====\n\n.. toctree::\n    :maxdepth: 2\n.. disqus::")
    # Patch copies of the docutils registries so directives/roles registered
    # by this build do not leak into other tests.
    monkeypatch.setattr(directives, "_directives",
                        getattr(directives, "_directives").copy())
    monkeypatch.setattr(roles, "_roles", getattr(roles, "_roles").copy())

    srcdir = confdir = str(tmpdir)
    outdir = tmpdir.join("_build", "html")
    doctreedir = outdir.join("doctrees").ensure(dir=True, rec=True)
    app = application.Sphinx(srcdir, confdir, str(outdir), str(doctreedir),
                             "html")

    if not expected_error:
        # Valid configuration: build must succeed and render the thread div.
        app.builder.build_all()
        html_body = outdir.join("index.html").read()
        disqus_div = re.findall(r'(<div[^>]+ id="disqus_thread"[^>]*></div>)',
                                html_body)[0]
        assert 'data-disqus-shortname="good"' in disqus_div
        return

    # Invalid configuration: build must fail with exactly the expected message.
    with pytest.raises(errors.ExtensionError) as exc:
        app.builder.build_all()
    assert expected_error == exc.value.args[0]
示例#20
0
def test_config_file_flag_sets_the_config_file(tmpdir: py.path.local):
    """Test that the --config-file sets the internal config file path"""

    config_file = tmpdir.join('test.toml')

    c = Config()
    c.parse([f'--config-file={config_file.strpath}', 'check', 'some_docs'])

    # `strpath` is already a str, so the previous str() wrapper was redundant.
    assert c._config_file_path == config_file.strpath
示例#21
0
def test_default_config_file_locations_are_used(tmpdir: py.path.local):
    """A `.notesystem.toml` in a default search location must be picked up."""
    config_file = tmpdir.join('.notesystem.toml')
    config_file.write("""
[general]
no_visual=true
""")
    folder_to_check = tmpdir.strpath
    c = Config('.notesystem.toml', [folder_to_check])
    opts = c.parse(('check', 'docs'))
    # TOML `true` parses to the bool True; `is` is the idiomatic check.
    assert opts['general']['no_visual']['value'] is True
def test_moduletool_create_v2(tmp_working_dir: py.path.local) -> None:
    """
    Verify that `inmanta module create` creates a valid v2 module with expected parameters.
    """
    namespace = argparse.Namespace(name="my_module", v1=False, no_input=True)
    ModuleTool().execute("create", namespace)

    # The on-disk directory uses the dashed form of the module name.
    module_dir = str(tmp_working_dir.join("my-module"))
    mod: module.ModuleV2 = module.ModuleV2(project=None, path=module_dir)
    assert mod.name == "my_module"
示例#23
0
def test_canonical_dataset_to_grib(grib_name: str,
                                   tmpdir: py.path.local) -> None:
    """A dataset written to GRIB and read back must equal the original."""
    source_path = os.path.join(SAMPLE_DATA_FOLDER, grib_name + ".grib")
    target_path = str(tmpdir.join(grib_name + ".grib"))

    original = xarray_store.open_dataset(source_path)

    with pytest.warns(FutureWarning):
        xarray_to_grib.canonical_dataset_to_grib(original, target_path)
    roundtripped = xarray_store.open_dataset(target_path)
    assert original.equals(roundtripped)
示例#24
0
def test_FileIndex_from_indexpath_or_filestream(tmpdir: py.path.local) -> None:
    """Exercise every index-file code path: create, reuse, skip, fail, refresh."""
    grib_file = tmpdir.join("file.grib")

    with open(TEST_DATA, "rb") as file:
        grib_file.write_binary(file.read())

    # create index file
    res = messages.FileIndex.from_indexpath_or_filestream(
        messages.FileStream(str(grib_file)), ["paramId"]
    )
    assert isinstance(res, messages.FileIndex)

    # read index file
    res = messages.FileIndex.from_indexpath_or_filestream(
        messages.FileStream(str(grib_file)), ["paramId"]
    )
    assert isinstance(res, messages.FileIndex)

    # do not read nor create the index file
    res = messages.FileIndex.from_indexpath_or_filestream(
        messages.FileStream(str(grib_file)), ["paramId"], indexpath=""
    )
    assert isinstance(res, messages.FileIndex)

    # can't create nor read index file
    res = messages.FileIndex.from_indexpath_or_filestream(
        messages.FileStream(str(grib_file)),
        ["paramId"],
        indexpath=str(tmpdir.join("non-existent-folder").join("non-existent-file")),
    )
    assert isinstance(res, messages.FileIndex)

    # trigger mtime check
    # Rewriting the GRIB file makes it newer than the stored index, so the
    # stale index must be discarded and rebuilt.
    grib_file.remove()
    with open(TEST_DATA, "rb") as file:
        grib_file.write_binary(file.read())

    res = messages.FileIndex.from_indexpath_or_filestream(
        messages.FileStream(str(grib_file)), ["paramId"]
    )
    assert isinstance(res, messages.FileIndex)
def test_get_module_v1(tmp_working_dir: py.path.local):
    """A working directory containing module.yml must be detected as a v1 module."""
    # `join` returns a py.path.local, not a str (annotation corrected).
    metadata_file: py.path.local = tmp_working_dir.join("module.yml")
    metadata_file.write("""
name: mod
license: ASL
version: 1.2.3
compiler_version: 2017.2
        """.strip())

    mt = ModuleTool()
    mod: module.Module = mt.get_module()
    assert mod.GENERATION == module.ModuleGeneration.V1
示例#26
0
def test_integration(tmpdir: py.path.local):
    # End-to-end pipeline: write raw pages -> convert to text -> train -> predict.
    pages_path = tmpdir.join('pages.jl.gz')
    # 100 pages total: the 4-entry template repeated 25 times, half with 404s.
    _write_jl(str(pages_path), [
        dict(url='http://example-{}.com/page'.format(i),
             headers={},
             mangled_url=data['status'] == 404,
             **data)
        for i, data in enumerate(25 * [
            {
                'html': '<h1>hi</h1><p>that is a normal page</p>',
                'status': 200,
            },
            {
                'html':
                '<h1>hi</h1><p>that is not a page you are looking for</p>',
                'status': 404,
            },
            {
                'html': '<h2>page not found</h2><p>404</p>',
                'status': 404,
            },
            {
                'html': '<h2>some info</h2><p>hey here</p>',
                'status': 200,
            },
        ])
    ])
    assert pages_path.exists()
    data_prefix = tmpdir.join('data')
    convert_to_text.main([str(pages_path), str(data_prefix)])
    # Conversion emits <prefix>.items.jl.gz and <prefix>.meta.jl.gz.
    assert tmpdir.join('data.items.jl.gz').exists()
    assert tmpdir.join('data.meta.jl.gz').exists()
    train.main([str(data_prefix), '--show-features'])
    model_path = tmpdir.join('clf.joblib')
    train.main([str(data_prefix), '--save', str(model_path)])
    assert model_path.exists()
    # Sanity-check the trained classifier on an obvious 404 and an obvious 200.
    clf = Soft404Classifier(str(model_path))
    assert clf.predict('<h2>page not found</h2><p>404</p>') > 0.5
    assert clf.predict('<h1>some info</h1>') < 0.5
示例#27
0
def test_star_pattern(tmpdir: py.path.local):
    """`*` as a pattern combined with a tag filter must return every tagged doc."""
    # Data-driven fixtures replace three copy-pasted write blocks; the file
    # contents are byte-identical to the originals.
    fixtures = {
        'test1.md': """---
               tags: findme
               ---
               Random content
                """,
        'test2.md': """---
                title: Random title
                tags: findme
                ---
                Random content 2
                """,
        'test3.md': """---
                tags: other
                ---
                Random content
                """,
    }
    for fname, contents in fixtures.items():
        tmpdir.join(fname).write(contents)

    search_mode = SearchMode()
    args = {
        'pattern': '*',
        'path': tmpdir.strpath,
        'tag_str': 'findme',
        'topic': None,
        'case_insensitive': False,
        'title': None,
        'full_path': False,
    }
    options: ModeOptions = {
        'visual': True,
        'args': args,
    }
    search_mode.start(options)

    # Only test1.md and test2.md carry the `findme` tag.
    assert len(search_mode.matches) == 2
def test_get_module_v2(tmp_working_dir: py.path.local):
    """A working directory with a setup.cfg and model dir must be detected as v2."""
    # `join` returns a py.path.local, not a str (annotation corrected).
    metadata_file: py.path.local = tmp_working_dir.join("setup.cfg")
    metadata_file.write("""
[metadata]
name = inmanta-module-mod
version = 1.2.3
license = ASL

[options]
install_requires =
  inmanta-modules-net ~=0.2.4
  inmanta-modules-std >1.0,<2.5

  cookiecutter~=1.7.0
  cryptography>1.0,<3.5
        """.strip())
    # A v2 module also needs a model directory with an _init.cf entry point.
    model_dir: py.path.local = tmp_working_dir.join("model")
    os.makedirs(str(model_dir))
    open(str(model_dir.join("_init.cf")), "w").close()

    mt = ModuleTool()
    mod: module.Module = mt.get_module()
    assert mod.GENERATION == module.ModuleGeneration.V2
示例#29
0
def _invoke_silent_ok(runner, args) -> None:
    """Invoke the cfgrib CLI and assert a clean, silent exit (code 0, no output)."""
    res = runner.invoke(__main__.cfgrib_cli, args)
    assert res.exit_code == 0
    assert res.output == ""


def test_cfgrib_cli_to_netcdf(tmpdir: py.path.local) -> None:
    """`cfgrib to_netcdf` must exit 0 and stay silent for every flag variant."""
    runner = click.testing.CliRunner()

    # no input files: a silent no-op
    _invoke_silent_ok(runner, ["to_netcdf"])

    # plain conversion
    _invoke_silent_ok(runner, ["to_netcdf", TEST_DATA])

    # explicit output path and CDS conventions
    out = tmpdir.join("tmp.nc")
    _invoke_silent_ok(runner,
                      ["to_netcdf", TEST_DATA, "-o" + str(out), "-cCDS"])

    # backend kwargs passed inline as a JSON string
    backend_kwargs = '{"time_dims": ["time"]}'
    _invoke_silent_ok(runner, ["to_netcdf", TEST_DATA, "-b", backend_kwargs])

    # backend kwargs passed as a path to a JSON file
    backend_kwargs_json = tmpdir.join("temp.json")
    with open(backend_kwargs_json, "w") as f:
        f.write(backend_kwargs)
    _invoke_silent_ok(runner,
                      ["to_netcdf", TEST_DATA, "-b",
                       str(backend_kwargs_json)])
示例#30
0
def _in_venv(path: py.path.local) -> bool:
    """Attempt to detect if ``path`` is the root of a Virtual Environment by
    checking for the existence of the appropriate activate script."""
    bindir = path.join("Scripts" if sys.platform.startswith("win") else "bin")
    if not bindir.isdir():
        return False
    # Covers POSIX shells, csh/fish, and the Windows activators.
    activates = (
        "activate",
        "activate.csh",
        "activate.fish",
        "Activate",
        "Activate.bat",
        "Activate.ps1",
    )
    # Generator form short-circuits on the first hit and avoids building a list.
    return any(fname.basename in activates for fname in bindir.listdir())