Example #1
    def test_very_long_cells(self):
        """
        Torture test that long cells do not cause issues
        """
        lorem_ipsum_text = textwrap.dedent("""\
          Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec
          dignissim, ipsum non facilisis tempus, dui felis tincidunt metus,
          nec pulvinar neque odio eget risus. Nulla nisi lectus, cursus
          suscipit interdum at, ultrices sit amet orci. Mauris facilisis
          imperdiet elit, vitae scelerisque ipsum dignissim non. Integer
          consequat malesuada neque sit amet pulvinar. Curabitur pretium
          ut turpis eget aliquet. Maecenas sagittis lacus sed lectus
          volutpat, eu adipiscing purus pulvinar. Maecenas consequat
          luctus urna, eget cursus quam mollis a. Aliquam vitae ornare
          erat, non hendrerit urna. Sed eu diam nec massa egestas pharetra
          at nec tellus. Fusce feugiat lacus quis urna sollicitudin volutpat.
          Quisque at sapien non nibh feugiat tempus ac ultricies purus.
           """)
        lorem_ipsum_text = lorem_ipsum_text.replace("\n"," ") + "\n\n"
        large_lorem_ipsum_text = "".join([lorem_ipsum_text]*3000)

        notebook_name = "lorem_ipsum_long.ipynb"
        nb = v4.new_notebook(
            cells=[
                    v4.new_markdown_cell(source=large_lorem_ipsum_text)
            ]
        )

        with TemporaryDirectory() as td:
            nbfile = os.path.join(td, notebook_name)
            with open(nbfile, 'w') as f:
                write(nb, f, 4)

            (output, resources) = LatexExporter(template_file='article').from_filename(nbfile)
            assert len(output) > 0
Example #2
    def convert_figure(self, data_format, data):
        """
        Convert a single SVG figure to PDF.  Returns converted data.
        """

        # Work in a temporary directory
        with TemporaryDirectory() as tmpdir:

            # Write fig to temp file
            input_filename = os.path.join(tmpdir, 'figure.svg')
            # SVG data is unicode text
            with io.open(input_filename, 'w', encoding='utf8') as f:
                f.write(cast_unicode_py2(data))

            # Call conversion application
            output_filename = os.path.join(tmpdir, 'figure.pdf')
            shell = self.command.format(from_filename=input_filename,
                                        to_filename=output_filename)
            subprocess.call(
                shell, shell=True)  # Shell=True okay since input is trusted.

            # Read the converted output back from disk
            if os.path.isfile(output_filename):
                with open(output_filename, 'rb') as f:
                    # PDF is a notebook-supported binary data type, so base64 encode.
                    return base64.encodebytes(f.read())
            else:
                raise TypeError("Inkscape svg to pdf conversion failed")
Example #3
 def test_simple_addition_model(self):
     # Tests a case where packaging works correctly and
     # the model output matches the expected output
     with TemporaryDirectory() as test_dir:
         self.package_simple_addition_model(
             test_dir, custom_ops=[self.custom_op_path, self.second_custom_op]
         )
Example #4
 def test_simple_addition_model_failure(self):
     # Tests a case where the output does not match the expected output
     with TemporaryDirectory() as test_dir:
         with self.assertRaises(ValueError):
             self.package_simple_addition_model(
                 test_dir, do_fail=True, custom_ops=[self.custom_op_path]
             )
Example #5
def test_make_sdist():
    # Smoke test of making a complete sdist
    builder = sdist.SdistBuilder.from_ini_path(
        osp.join(samples_dir, 'package1-pkg.ini'))
    with TemporaryDirectory() as td:
        builder.build(td)
        assert_isfile(osp.join(td, 'package1-0.1.tar.gz'))
Example #6
    def test_python_deps(self):
        # Test that we can correctly load two different models with the same dependencies
        with TemporaryDirectory() as test_dir1:
            with TemporaryDirectory() as test_dir2:
                path1 = self.package_sklearn_model(test_dir1)
                path2 = self.package_sklearn_model(test_dir2)

                with load_neuropod(path1) as n1:
                    with load_neuropod(path2) as n2:
                        input_data = {
                            "x": np.array([[4, 5]], dtype=np.float64)
                        }
                        self.assertAlmostEqual(
                            n1.infer(input_data)["out"][0], 17)
                        self.assertAlmostEqual(
                            n2.infer(input_data)["out"][0], 17)
Example #7
def test_prepare_metadata_for_build_wheel():
    hooks = get_hooks('pkg2')
    with TemporaryDirectory() as metadatadir:
        with modified_env({'PYTHONPATH': BUILDSYS_PKGS}):
            hooks.prepare_metadata_for_build_wheel(metadatadir, {})

        assert_isfile(pjoin(metadatadir, 'pkg2-0.5.dist-info', 'METADATA'))
Example #8
    def package_simple_addition_model(self, do_fail=False):
        with TemporaryDirectory() as test_dir:
            neuropod_path = os.path.join(test_dir, "test_neuropod")
            model_code_dir = os.path.join(test_dir, "model_code")
            os.makedirs(model_code_dir)

            with open(os.path.join(model_code_dir, "addition_model.py"),
                      "w") as f:
                f.write(ADDITION_MODEL_SOURCE)

            # `create_python_neuropod` runs inference with the test data immediately
            # after creating the neuropod. Raises a ValueError if the model output
            # does not match the expected output.
            create_python_neuropod(
                neuropod_path=neuropod_path,
                model_name="addition_model",
                data_paths=[],
                code_path_spec=[{
                    "python_root":
                    model_code_dir,
                    "dirs_to_package": [
                        ""  # Package everything in the python_root
                    ],
                }],
                entrypoint_package="addition_model",
                entrypoint="get_model",
                # Get the input/output spec along with test data
                **get_addition_model_spec(do_fail=do_fail))

            # Run some additional checks
            check_addition_model(neuropod_path)
Example #9
def notebook_server():
    info = {}
    with TemporaryDirectory() as td:
        nbdir = info['nbdir'] = pjoin(td, 'notebooks')
        os.makedirs(pjoin(nbdir, u'sub ∂ir1', u'sub ∂ir 1a'))
        os.makedirs(pjoin(nbdir, u'sub ∂ir2', u'sub ∂ir 1b'))

        info['extra_env'] = {
            'JUPYTER_CONFIG_DIR': pjoin(td, 'jupyter_config'),
            'JUPYTER_RUNTIME_DIR': pjoin(td, 'jupyter_runtime'),
            'IPYTHONDIR': pjoin(td, 'ipython'),
        }
        env = os.environ.copy()
        env.update(info['extra_env'])

        command = [sys.executable, '-m', 'notebook',
                   '--no-browser',
                   '--notebook-dir', nbdir,
                   # run with a base URL that would be escaped,
                   # to test that we don't double-escape URLs
                   '--NotebookApp.base_url=/a@b/',
                   ]
        print("command=", command)
        proc = info['popen'] = Popen(command, cwd=nbdir, env=env)
        info_file_path = pjoin(td, 'jupyter_runtime',
                               'nbserver-%i.json' % proc.pid)
        info.update(_wait_for_server(proc, info_file_path))

        print("Notebook server info:", info)
        yield info

    # Shut the server down
    requests.post(urljoin(info['url'], 'api/shutdown'),
                  headers={'Authorization': 'token '+info['token']})
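
The _wait_for_server helper used above is not shown. Assuming the notebook server writes its connection info (URL, token, etc.) to the nbserver-<pid>.json file it is polled for, a hypothetical sketch of such a helper could look like this:

import json
import os
import time

def _wait_for_server(proc, info_file_path, timeout=30):
    # Hypothetical sketch: poll until the server has written its info file.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if proc.poll() is not None:
            raise RuntimeError("Notebook server process exited unexpectedly")
        if os.path.isfile(info_file_path):
            try:
                with open(info_file_path) as f:
                    return json.load(f)
            except ValueError:
                pass  # file is still being written; retry
        time.sleep(0.1)
    raise TimeoutError("Timed out waiting for %s" % info_file_path)
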
Example #10
    def package_devices_model(self):
        with TemporaryDirectory() as test_dir:
            neuropod_path = os.path.join(test_dir, "test_neuropod")

            # `create_torchscript_neuropod` runs inference with the test data immediately
            # after creating the neuropod. Raises a ValueError if the model output
            # does not match the expected output.
            create_torchscript_neuropod(
                neuropod_path=neuropod_path,
                model_name="devices_model",
                module=DevicesModel(),
                input_spec=[
                    {
                        "name": "x",
                        "dtype": "float32",
                        "shape": (None, )
                    },
                    {
                        "name": "y",
                        "dtype": "float32",
                        "shape": (None, )
                    },
                ],
                output_spec=[
                    {
                        "name": "x",
                        "dtype": "int64",
                        "shape": (None, )
                    },
                    {
                        "name": "y",
                        "dtype": "int64",
                        "shape": (None, )
                    },
                ],
                test_input_data={
                    "x": np.arange(5).astype(np.float32),
                    "y": np.arange(5).astype(np.float32),
                },
                test_expected_out={
                    "x": np.array([0], dtype=np.int64),
                    "y": np.array([1], dtype=np.int64),
                },
                input_tensor_device={"x": "CPU"},
                default_input_tensor_device="GPU",
            )

            # Ensure all inputs are moved to CPU if we run with no visible GPUs
            load_and_test_neuropod(
                neuropod_path,
                test_input_data={
                    "x": np.arange(5).astype(np.float32),
                    "y": np.arange(5).astype(np.float32),
                },
                test_expected_out={
                    "x": np.array([0], dtype=np.int64),
                    "y": np.array([0], dtype=np.int64),
                },
                neuropod_load_args={"visible_gpu": None},
            )
Example #11
def notebook_server():
    info = {}
    temp_dir = TemporaryDirectory()
    td = temp_dir.name
    # do not use context manager because of https://github.com/vatlab/sos-notebook/issues/214
    if True:
        nbdir = info['nbdir'] = pjoin(td, 'notebooks')
        os.makedirs(pjoin(nbdir, u'sub ∂ir1', u'sub ∂ir 1a'))
        os.makedirs(pjoin(nbdir, u'sub ∂ir2', u'sub ∂ir 1b'))
        # print(nbdir)
        info['extra_env'] = {
            'JUPYTER_CONFIG_DIR': pjoin(td, 'jupyter_config'),
            'JUPYTER_RUNTIME_DIR': pjoin(td, 'jupyter_runtime'),
            'IPYTHONDIR': pjoin(td, 'ipython'),
        }
        env = os.environ.copy()
        env.update(info['extra_env'])

        command = [
            sys.executable,
            '-m',
            'notebook',
            '--no-browser',
            '--notebook-dir',
            nbdir,
            # run with a base URL that would be escaped,
            # to test that we don't double-escape URLs
            '--NotebookApp.base_url=/a@b/',
        ]
        print("command=", command)
        proc = info['popen'] = Popen(command, cwd=nbdir, env=env)
        info_file_path = pjoin(td, 'jupyter_runtime',
                               'nbserver-%i.json' % proc.pid)
        info.update(_wait_for_server(proc, info_file_path))

        print("Notebook server info:", info)
        yield info

    # Manually try to clean up; this can fail on Windows because of a
    # permission error caused by IPython's history.sqlite.
    try:
        temp_dir.cleanup()
    except Exception as ex:
        print(ex)
    # Shut the server down
    requests.post(urljoin(info['url'], 'api/shutdown'),
                  headers={'Authorization': 'token ' + info['token']})
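
On Python 3.10 and later, the Windows cleanup problem worked around above can usually be handled with TemporaryDirectory(ignore_cleanup_errors=True), which keeps the context-manager form. A minimal sketch, assuming Python 3.10+:

from tempfile import TemporaryDirectory

# ignore_cleanup_errors is only available on Python 3.10+.
with TemporaryDirectory(ignore_cleanup_errors=True) as td:
    ...  # a locked file (e.g. IPython's history.sqlite) no longer aborts cleanup
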
Example #12
 def test_strings_model(self):
     # Tests a case where packaging works correctly and
     # the model output matches the expected output
     for model in [
             StringsModel, StringsModelDictInput, StringsModelListOutput
     ]:
         with TemporaryDirectory() as test_dir:
             package_strings_model(test_dir, model=model)
Example #13
def test_download():
    wd = WheelDownloader("astsearch==0.1.2", "3.5.1", 64)
    wheel = wd.fetch()
    assert_isfile(wheel)

    with TemporaryDirectory() as td:
        extract_wheel(wheel, target_dir=td)
        assert_isfile(pjoin(td, 'astsearch.py'))
Example #14
def test_build_editable():
    with TemporaryDirectory() as td, cwd(osp.join(samples_dir,'pep517')):
        filename = buildapi.build_editable(td)
        assert filename.endswith('.whl'), filename
        assert_isfile(osp.join(td, filename))
        assert zipfile.is_zipfile(osp.join(td, filename))
        with zipfile.ZipFile(osp.join(td, filename)) as zip:
            assert "module1.py" not in zip.namelist()
            assert "module1.pth" in zip.namelist()
Example #15
def compressed_data():
    data_path = yatest.common.source_path(os.path.join("catboost", "pytest", "data"))
    tmp_dir = TemporaryDirectory()
    for file_name in os.listdir(data_path):
        if file_name.endswith('.zip'):
            with zipfile.ZipFile(os.path.join(data_path, file_name)) as zip_file:
                zip_file.extractall(path=tmp_dir.name)

    return tmp_dir
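
Because this function returns the TemporaryDirectory object itself rather than its path, the caller must keep a reference to it for as long as the extracted files are needed. A usage sketch, assuming compressed_data is called as a plain function (in the original test suite it may instead be registered as a pytest fixture and injected automatically):

import os

tmp_dir = compressed_data()
try:
    extracted = os.listdir(tmp_dir.name)  # extracted archive contents live here
finally:
    tmp_dir.cleanup()  # explicit cleanup; also runs when the object is finalized
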
Example #16
def test_build_sdist(mock_pip_install):
    with modified_env({'PYTHONPATH': BUILDSYS_PKGS}), \
            TemporaryDirectory() as outdir:
        filename = build_sdist(pjoin(SAMPLES_DIR, 'pkg1'), outdir)
        assert_isfile(pjoin(outdir, filename))
        assert tarfile.is_tarfile(pjoin(outdir, filename))

    assert mock_pip_install.call_count == 2
    assert mock_pip_install.call_args_list[0] == call(['eg_buildsys'])
    assert mock_pip_install.call_args_list[1] == call(['frog'])
Example #17
    def test_save_load_test_data(self):
        with TemporaryDirectory() as test_dir:
            neuropod_path = os.path.join(test_dir, "test_neuropod")
            os.mkdir(neuropod_path)

            test_input = {"x": 3, "y": 4}
            test_expected_output = {"out": 7}

            save_test_data(neuropod_path, test_input, test_expected_output)
            test_data = load_test_data(neuropod_path)
            self.assertEqual(test_input, test_data["test_input"])
            self.assertEqual(test_expected_output, test_data["test_output"])
Example #18
def test_build_wheel():
    hooks = get_hooks('pkg1')
    with TemporaryDirectory() as builddir:
        with modified_env({'PYTHONPATH': BUILDSYS_PKGS}):
            whl_file = hooks.build_wheel(builddir, {})

        assert whl_file.endswith('.whl')
        assert os.sep not in whl_file

        whl_file = pjoin(builddir, whl_file)
        assert_isfile(whl_file)
        assert zipfile.is_zipfile(whl_file)
Example #19
    def package_named_tuple_model(self, do_fail=False):
        with TemporaryDirectory() as test_dir:
            neuropod_path = os.path.join(test_dir, "test_neuropod")

            # `create_torchscript_neuropod` runs inference with the test data immediately
            # after creating the neuropod. Raises a ValueError if the model output
            # does not match the expected output.
            create_torchscript_neuropod(
                neuropod_path=neuropod_path,
                model_name="named_tuple_model",
                module=NamedTupleModel(),
                input_spec=[
                    {
                        "name": "x",
                        "dtype": "float32",
                        "shape": ("batch_size", )
                    },
                    {
                        "name": "y",
                        "dtype": "float32",
                        "shape": ("batch_size", )
                    },
                ],
                output_spec=[
                    {
                        "name": "sum",
                        "dtype": "float32",
                        "shape": ("batch_size", )
                    },
                    {
                        "name": "difference",
                        "dtype": "float32",
                        "shape": ("batch_size", ),
                    },
                    {
                        "name": "product",
                        "dtype": "float32",
                        "shape": ("batch_size", )
                    },
                ],
                test_input_data={
                    "x": np.arange(5, dtype=np.float32),
                    "y": np.arange(5, dtype=np.float32),
                },
                test_expected_out={
                    "sum":
                    np.zeros(5) if do_fail else np.arange(5) + np.arange(5),
                    "difference":
                    np.zeros(5) if do_fail else np.arange(5) - np.arange(5),
                    "product":
                    np.zeros(5) if do_fail else np.arange(5) * np.arange(5),
                },
            )
Example #20
    def package_mixed_types_model(self, do_fail=False):
        with TemporaryDirectory() as test_dir:
            neuropod_path = os.path.join(test_dir, "test_neuropod")

            # `create_torchscript_neuropod` runs inference with the test data immediately
            # after creating the neuropod. Raises a ValueError if the model output
            # does not match the expected output.
            create_torchscript_neuropod(
                neuropod_path=neuropod_path,
                model_name="mixed_types_model",
                module=MixedReturnTypesModel(),
                # Get the input/output spec along with test data
                **get_mixed_model_spec(do_fail=do_fail))
Example #21
def test_prepare_metadata_for_build_wheel_no_fallback():
    hooks = get_hooks('pkg2')

    with TemporaryDirectory() as metadatadir:
        with modified_env({'PYTHONPATH': BUILDSYS_PKGS}):
            with pytest.raises(HookMissing) as exc_info:
                hooks.prepare_metadata_for_build_wheel(
                    metadatadir, {}, _allow_fallback=False
                )

            e = exc_info.value
            assert 'prepare_metadata_for_build_wheel' == e.hook_name
            assert 'prepare_metadata_for_build_wheel' in str(e)
Example #22
    def test_consistent_hash(self):
        # Packages the same model twice and ensures it has the same hash
        shas = []
        for i in range(2):
            with TemporaryDirectory() as test_dir:
                self.package_simple_addition_model(
                    test_dir, custom_ops=[self.custom_op_path, self.second_custom_op]
                )

                neuropod_path = os.path.join(test_dir, "test_neuropod")
                shas.append(sha256sum(neuropod_path))

        self.assertEqual(shas[0], shas[1])
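
The sha256sum helper used above (hashing a whole neuropod directory) is not shown. Below is a hypothetical sketch of one way such a deterministic directory digest might be computed; it is not necessarily the implementation used in the test suite.

import hashlib
import os

def sha256sum(path):
    # Hash relative paths and file contents in sorted order so the
    # digest is stable across runs and machines.
    digest = hashlib.sha256()
    for root, dirs, files in os.walk(path):
        dirs.sort()
        for name in sorted(files):
            full_path = os.path.join(root, name)
            digest.update(os.path.relpath(full_path, path).encode('utf-8'))
            with open(full_path, 'rb') as f:
                digest.update(f.read())
    return digest.hexdigest()
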
Example #23
 def test_stateful_model(self):
     # `init_op` can be passed a list of strings or a string
     for init_op_name_as_list in [False, True]:
         with TemporaryDirectory() as test_dir:
             neuropod_path = os.path.join(test_dir, "test_neuropod")
             self.package_accumulator_model(neuropod_path,
                                            init_op_name_as_list)
             neuropod_obj = load_neuropod(neuropod_path)
             np.testing.assert_equal(neuropod_obj.name, "accumulator_model")
             np.testing.assert_equal(neuropod_obj.platform, "tensorflow")
             np.testing.assert_equal(
                 neuropod_obj.infer({"x": np.float32(2.0)}), {"out": 2.0})
             np.testing.assert_equal(
                 neuropod_obj.infer({"x": np.float32(4.0)}), {"out": 6.0})
Example #24
def test_issue_104():
    hooks = get_hooks('test-for-issue-104')
    with TemporaryDirectory() as outdir:
        with modified_env({
            'PYTHONPATH': BUILDSYS_PKGS,
            'PEP517_ISSUE104_OUTDIR': outdir,
        }):
            hooks.get_requires_for_build_wheel({})
        with open(pjoin(outdir, 'out.json')) as f:
            children = json.load(f)
    assert set(children) <= {
        '__init__.py', '__init__.pyc', '_in_process.py', '_in_process.pyc',
        '__pycache__',
    }
Example #25
    def package_simple_addition_model(self, do_fail=False):
        for model in [CustomOpModel]:
            with TemporaryDirectory() as test_dir:
                neuropod_path = os.path.join(test_dir, "test_neuropod")

                # `create_torchscript_neuropod` runs inference with the test data immediately
                # after creating the neuropod. Raises a ValueError if the model output
                # does not match the expected output.
                create_torchscript_neuropod(
                    neuropod_path=neuropod_path,
                    model_name="addition_model",
                    module=model(),
                    custom_ops=[self.custom_op_path, self.second_custom_op],
                    # Get the input/output spec along with test data
                    **get_addition_model_spec(do_fail=do_fail))
Example #26
    def test_noncontiguous_array(self):
        x = np.arange(16).astype(np.int64).reshape(4, 4)

        with TemporaryDirectory() as test_dir:
            neuropod_path = os.path.join(test_dir, "test_neuropod")
            model_code_dir = os.path.join(test_dir, "model_code")
            os.makedirs(model_code_dir)

            with open(os.path.join(model_code_dir, "splitter_model.py"),
                      "w") as f:
                f.write(NONCONTIGUOUS_MODEL_SOURCE)

            create_python_neuropod(
                neuropod_path=neuropod_path,
                model_name="splitter",
                data_paths=[],
                code_path_spec=[{
                    "python_root":
                    model_code_dir,
                    "dirs_to_package": [
                        ""  # Package everything in the python_root
                    ],
                }],
                entrypoint_package="splitter_model",
                entrypoint="get_model",
                input_spec=[{
                    "name": "x",
                    "dtype": "int64",
                    "shape": (4, 4)
                }],
                output_spec=[
                    {
                        "name": "x1",
                        "dtype": "int64",
                        "shape": (4, 2)
                    },
                    {
                        "name": "x2",
                        "dtype": "int64",
                        "shape": (4, 2)
                    },
                ],
                test_input_data={"x": x},
                test_expected_out={
                    "x1": x[:, :2],
                    "x2": x[:, 2:]
                },
            )
Example #27
def test_build_sdist():
    hooks = get_hooks('pkg1')
    with TemporaryDirectory() as sdistdir:
        with modified_env({'PYTHONPATH': BUILDSYS_PKGS}):
            sdist = hooks.build_sdist(sdistdir, {})

        assert sdist.endswith('.tar.gz')
        assert os.sep not in sdist

        sdist = pjoin(sdistdir, sdist)
        assert_isfile(sdist)
        assert tarfile.is_tarfile(sdist)

        with tarfile.open(sdist) as tf:
            contents = tf.getnames()
        assert 'pkg1-0.5/pyproject.toml' in contents
Example #28
    def test_mixed_types_model_failure_duplicate_item(self):
        # Tests a model that returns duplicate items across multiple dictionaries
        # This is either a CalledProcessError or a RuntimeError depending on whether
        # we're using the native bindings or not
        with self.assertRaises((CalledProcessError, RuntimeError)):
            with TemporaryDirectory() as test_dir:
                neuropod_path = os.path.join(test_dir, "test_neuropod")

                # `create_torchscript_neuropod` runs inference with the test data immediately
                # after creating the neuropod. Raises a ValueError if the model output
                # does not match the expected output.
                create_torchscript_neuropod(
                    neuropod_path=neuropod_path,
                    model_name="mixed_types_model",
                    module=MixedReturnTypesModelDuplicateItem(),
                    # Get the input/output spec along with test data
                    **get_mixed_model_spec())
Example #29
def test_prepare_bin_dir():
    with TemporaryDirectory() as td:
        td = Path(td)
        commands.prepare_bin_directory(td, cmds)
        assert_isfile(td / 'acommand.exe')
        script_file = td / 'acommand-script.py'
        assert_isfile(script_file)

        with script_file.open() as f:
            script_contents = f.read()
        assert script_contents.startswith("#!python")
        assert_in('import extra', script_contents)
        assert_in('somefunc()', script_contents)

        _rewrite_shebangs.main(['_rewrite_shebangs.py', str(td)])
        with script_file.open() as f:
            assert f.read().startswith('#!"')
Example #30
    def setUp(self):
        self.td = TemporaryDirectory()
        self.addCleanup(self.td.cleanup)

        self.file_path = os.path.join(self.td.name, 'afile')
        with open(self.file_path, 'w') as f:
            f.write('Blah')

        self.dir_path = os.path.join(self.td.name, 'adir')
        os.mkdir(self.dir_path)

        self.link_path = os.path.join(self.td.name, 'alink')
        if os.name == 'posix':
            # Symlinks are rarely usable on Windows, because a special
            # permission is needed to create them.
            os.symlink(self.file_path, self.link_path)

        self.nonexistant_path = os.path.join(self.td.name, 'doesntexist')