Example #1
    def test_one_to_one_only_compares_filenames(import_dir: LocalPath):
        datafiles: List[str] = randomize(
            *glob(os.path.join(get_data_path(), "iris-part-*-of-6.csv")))
        target_metafiles: List[str] = glob(
            os.path.join(get_data_path(), "iris-part-*-of-6.meta"))
        metafiles: List[str] = []
        for meta in target_metafiles:
            dest = import_dir.join(os.path.basename(meta))
            metafiles.append(str(dest))
            LocalPath(meta).copy(dest)
        metafiles = randomize(*metafiles)

        datapath = LocalPath(get_data_path())
        expected_filemap: Dict[str, List[str]] = {
            str(datapath.join("iris-part-1-of-6.csv")):
            [str(import_dir.join("iris-part-1-of-6.meta"))],
            str(datapath.join("iris-part-2-of-6.csv")):
            [str(import_dir.join("iris-part-2-of-6.meta"))],
            str(datapath.join("iris-part-3-of-6.csv")):
            [str(import_dir.join("iris-part-3-of-6.meta"))],
            str(datapath.join("iris-part-4-of-6.csv")):
            [str(import_dir.join("iris-part-4-of-6.meta"))],
            str(datapath.join("iris-part-5-of-6.csv")):
            [str(import_dir.join("iris-part-5-of-6.meta"))],
            str(datapath.join("iris-part-6-of-6.csv")):
            [str(import_dir.join("iris-part-6-of-6.meta"))],
        }

        actual_filemap: Dict[str,
                             List[str]] = filemap(MappingBehavior.ONE_TO_ONE,
                                                  datafiles, metafiles)
        for k in actual_filemap:
            actual_filemap[k].sort()

        assert expected_filemap == actual_filemap
Example #2
def main():
    from sys import argv
    argv = argv[1:]
    sources, destination = argv[:-1], argv[-1]

    from py._path.local import LocalPath
    sources = tuple([LocalPath(src) for src in sources])
    destination = LocalPath(destination)

    return make_sdists(sources, destination)
Example #3
def main():
    assert 'PIP_INDEX_URL' not in os.environ, os.environ['PIP_INDEX_URL']
    from sys import argv
    argv = argv[1:]
    sources, destination = argv[:-1], argv[-1]

    from py._path.local import LocalPath
    sources = tuple([LocalPath(src) for src in sources])
    destination = LocalPath(destination)

    return make_sdists(sources, destination)
Example #4
def pytest_sessionfinish(session, exitstatus):
    """ whole test run finishes. """
    collectArtifactsArgs = session.config.getoption(OPTION_COLLECT_ARTIFACTS)
    compressArtifacts = session.config.getoption(OPTION_COMPRESS_ARTIFACTS)
    if collectArtifactsArgs:
        a = ArtifactCollector()
        if session.config._artifactsOutputPath.exists():
            try:
                session.config._artifactsOutputPath.remove(ignore_errors=True)
            except Exception:
                # Best-effort cleanup; a fresh directory is created below.
                pass
        session.config._artifactsOutputPath.mkdir()

        for item in collectArtifactsArgs:
            a.add_search_pattern(item)

        for collection in a.collect_using_glob():
            session.config._artifactsCollected.extend(collection)

        if compressArtifacts:
            if len(session.config._artifactsCollected) > 0:
                a.zip_artifact_collection(
                    session.config._artifactsZipFileOutputPath,
                    session.config._artifactsCollected,
                )
        else:
            for artifactFilePath in session.config._artifactsCollected:
                LocalPath(artifactFilePath).copy(
                    session.config._artifactsOutputPath)
Example #5
def child_pids(pid):
    """Return a list of direct child PIDs for the given PID."""
    children = set()
    for p in LocalPath('/proc').listdir():
        try:
            stat = open(p.join('stat').strpath).read()
            m = re.match(
                r'^\d+ \(.+?\) '
                # This field, state, is normally a single letter, but can be
                # "0" if there are some unusual security settings that prevent
                # reading the process state (happens under GitHub Actions with
                # QEMU for some reason).
                '[0a-zA-Z] '
                r'(\d+) ',
                stat,
            )
            assert m, stat
            ppid = int(m.group(1))
            if ppid == pid:
                children.add(int(p.basename))
        except OSError:
            # Happens when the process exits after listing it, or between
            # opening stat and reading it.
            pass
    return children
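
A minimal usage sketch for the helper above (Linux-only, since it reads /proc): spawn a short-lived child and confirm it is reported among our children.

import os
import subprocess

# Start a short-lived child process and verify child_pids() reports it.
child = subprocess.Popen(['sleep', '5'])
assert child.pid in child_pids(os.getpid())
child.terminate()
child.wait()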
Example #6
    def test_one_to_one_pairs_equal_filenames():
        datafiles: List[str] = randomize(
            *glob(os.path.join(get_data_path(), "iris-part-*-of-6.csv")))
        metafiles: List[str] = randomize(
            *glob(os.path.join(get_data_path(), "iris-part-*-of-6.meta")))

        datapath = LocalPath(get_data_path())
        expected_filemap: Dict[str, List[str]] = {
            str(datapath.join("iris-part-1-of-6.csv")):
            [str(datapath.join("iris-part-1-of-6.meta"))],
            str(datapath.join("iris-part-2-of-6.csv")):
            [str(datapath.join("iris-part-2-of-6.meta"))],
            str(datapath.join("iris-part-3-of-6.csv")):
            [str(datapath.join("iris-part-3-of-6.meta"))],
            str(datapath.join("iris-part-4-of-6.csv")):
            [str(datapath.join("iris-part-4-of-6.meta"))],
            str(datapath.join("iris-part-5-of-6.csv")):
            [str(datapath.join("iris-part-5-of-6.meta"))],
            str(datapath.join("iris-part-6-of-6.csv")):
            [str(datapath.join("iris-part-6-of-6.meta"))],
        }

        actual_filemap: Dict[str,
                             List[str]] = filemap(MappingBehavior.ONE_TO_ONE,
                                                  datafiles, metafiles)
        for k in actual_filemap:
            actual_filemap[k].sort()

        assert expected_filemap == actual_filemap
Example #7
def pytest_collection(session: pytest.Session):
    session.items = []

    for filename, tests in groupby(coordinator.tests, key=lambda test: test['file']):
        file = MochaFile.from_parent(session, fspath=LocalPath(filename))

        for info in tests:
            requested_fixtures = ['live_server', '_live_server_helper']
            test = MochaTest.from_parent(
                file,
                name='::'.join(info['parents']),
                fixtureinfo=FuncFixtureInfo(
                    argnames=tuple(requested_fixtures),
                    initialnames=tuple(requested_fixtures),
                    names_closure=requested_fixtures,
                    name2fixturedefs={},
                ),
                keywords={
                    'django_db': pytest.mark.django_db(transaction=True),
                }
            )

            session.items.append(test)

    ###
    # NOTE: if this counter remains 0 at end of session, an exit code of 5 will be returned.
    #       This value is normally set by Session.perform_collect(), but we are bypassing that
    #       implementation.
    #
    session.testscollected = len(session.items)

    return session.items
Example #8
def datadir():
    """DATADIR as a LocalPath"""
    import os
    here = os.path.split(__file__)[0]
    DATADIR = os.path.join(here, "data")
    from py._path.local import LocalPath
    return LocalPath(DATADIR)
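
Presumably registered as a pytest fixture (any decorator would have been stripped in extraction); a test could then request the data directory by name, e.g.:

def test_data_directory_exists(datadir):
    # datadir is the LocalPath returned by the fixture above.
    assert datadir.check(dir=True)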
Example #9
def child_pids(pid):
    """Return a list of direct child PIDs for the given PID."""
    pid = str(pid)
    tasks = LocalPath('/proc').join(pid, 'task').listdir()
    return set(
        int(child_pid) for task in tasks
        for child_pid in task.join('children').read().split())
Example #10
    def test_increment_without_metadata_without_schema(
        self,
        capsys: CaptureFixture,
        archive_dir: LocalPath,
        archive_fixture: "TestArchive.ArchiveCacheAndHashPassthruChecker",
        schema_file: Optional[LocalPath],
        verbose: bool,
    ):
        # List of (expected frame filename, data filename) tuples
        targets: List[Tuple[str, str]] = [
            ("iris-part-1-of-6-combined.csv", "iris-part-1-of-6.csv"),
            ("iris-part-1-2.csv", "iris-part-2-of-6.csv"),
            ("iris-part-1-2-3.csv", "iris-part-3-of-6.csv"),
            ("iris-part-1-2-3-4.csv", "iris-part-4-of-6.csv"),
            ("iris-part-1-2-3-4-5.csv", "iris-part-5-of-6.csv"),
            ("iris_plus.csv", "iris-part-6-of-6.csv"),
        ]

        expected_hashfile = (
            LocalPath(archive_fixture.cache_file).dirpath(DEFAULT_HASH_FILE) if
            archive_fixture.hash_file is None else archive_fixture.hash_file)
        assert not os.path.exists(expected_hashfile)
        assert not os.path.exists(archive_fixture.cache_file)
        assert len(archive_dir.listdir()) == 0

        for expected_frame_filename, data_filename in targets:
            assert archive_fixture(
                archive_dir,
                [os.path.join(get_data_path(), data_filename)],
                cache_filepath=archive_fixture.cache_file,
                hash_filepath=archive_fixture.hash_file,
                verbose=verbose,
            )
            assert_captured_outerr(capsys.readouterr(), verbose, False)

            expected_frame = DataFrame(
                read_csv(
                    os.path.join(get_data_path(), expected_frame_filename),
                    dtype=str,
                    index_col="Index",
                ))
            del expected_frame["Species"]
            del expected_frame["PetalColor"]
            expected_frame.sort_index(inplace=True)
            actual_frame = DataFrame(
                read_csv(str(archive_fixture.cache_file),
                         dtype=str,
                         index_col="Index"))
            actual_frame.sort_index(inplace=True)
            assert_captured_outerr(capsys.readouterr(), False, False)

            assert_frame_equal(expected_frame, actual_frame)
            assert os.path.exists(expected_hashfile)
            assert syphon.check(
                archive_fixture.cache_file,
                hash_filepath=expected_hashfile,
                verbose=verbose,
            )
Example #11
def pytest_configure(config):
    """called after command line options have been parsed
    and all plugins and initial conftest files been loaded.
    This hook is called for every plugin.
    """
    config._artifactsCollected = []
    config._artifactsZipFilename = PYTEST_ARTIFACTS_ZIP_FILENAME
    config._artifactsOutputPath = LocalPath(os.getcwd()).join(PYTEST_ARTIFACTS)
    config._artifactsZipFileOutputPath = config._artifactsOutputPath.join(
        PYTEST_ARTIFACTS_ZIP_FILENAME)
Example #12
def create_subdirectories(path, amount, depth):
    path = str(path)
    for x in range(amount):
        p = LocalPath(path).join(str(depth))
        p.mkdir()
        for ext in [".a", ".b", ".c", ".d", ""]:
            _f = p.join("testfile%s" % ext)
            _f.write("")
        if not depth == 0:
            depth -= 1
            create_subdirectories(p, 1, depth)
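
A hedged usage sketch with pytest's tmpdir fixture: one nested directory chain (3/2/1/0), five files per level.

def test_tree_layout(tmpdir):
    # Creates tmpdir/3, tmpdir/3/2, tmpdir/3/2/1 and tmpdir/3/2/1/0,
    # each containing testfile.a/.b/.c/.d and a bare "testfile".
    create_subdirectories(tmpdir, 1, 3)
    assert tmpdir.join("3", "2", "1", "0").check(dir=True)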
Example #13
def child_pids(pid):
    """Return a list of direct child PIDs for the given PID."""
    children = set()
    for p in LocalPath('/proc').listdir():
        stat = p.join('stat')
        if stat.isfile():
            stat = stat.open().read()
            m = re.match(r'^\d+ \([^)]+\) [a-zA-Z] (\d+) ', stat)
            assert m, stat
            ppid = int(m.group(1))
            if ppid == pid:
                children.add(int(p.basename))
    return children
Example #14
def test_entry_point_detected(_pytest):
    f = get_temp_file("""
    entry-point: factory
    """)
    if hasattr(YamlChaosFile, "from_parent"):
        from py._path.local import LocalPath
        plugin = YamlChaosFile.from_parent(_pytest.request.node,
                                           fspath=LocalPath(f.name))
    else:
        plugin = YamlChaosFile(f.name, _pytest.request.node)

    with pytest.raises(ImportStringError):
        plugin.collect()
    f.close()
Example #15
def test_no_entry_point(_pytest):
    f = get_temp_file("""
    entry-point1="factory"
    """)
    if hasattr(TomlChaosFile, "from_parent"):
        from py._path.local import LocalPath
        plugin = TomlChaosFile.from_parent(_pytest.request.node,
                                           fspath=LocalPath(f.name))
    else:
        plugin = TomlChaosFile(f.name, _pytest.request.node)

    with pytest.raises(AssertionError,
                       match="Define entry point to run chaos testing."):
        plugin.collect()
    f.close()
Example #16
def child_pids(pid):
    """Return a list of direct child PIDs for the given PID."""
    children = set()
    for p in LocalPath('/proc').listdir():
        try:
            stat = open(p.join('stat').strpath).read()
            m = re.match(r'^\d+ \(.+?\) [a-zA-Z] (\d+) ', stat)
            assert m, stat
            ppid = int(m.group(1))
            if ppid == pid:
                children.add(int(p.basename))
        except OSError:
            # Happens when the process exits after listing it, or between
            # opening stat and reading it.
            pass
    return children
Example #17
def mk_dummy_parent(tmp_path, filename, content=""):

    path = tmp_path / filename
    path.write_text(content)

    config = Mock(spec=Config)
    config.rootdir = str(tmp_path)
    session = SimpleNamespace(config=config, _initialpaths=[])
    parent = SimpleNamespace(
        config=config,
        session=session,
        nodeid="dummy",
        fspath=LocalPath(path),
        _path=path,
    )

    return parent
Example #18
def find_files(directory, pattern):
    """Searching for pattern in specific directory.
    Returns an list with found files and directories according to
    given pattern.
    :param dirPathToSearch:
    :param pattern: No tilde expansion is done, but *, ?, and
    character ranges expressed with [] will be correctly matched.
    :return: yields a file path
    """
    directory = str(directory)
    log.info("Searching for %s in %s", pattern, directory)
    for root, dirs, files in os.walk(directory):
        for basename in files:
            if fnmatch.fnmatch(basename, pattern):
                filePath = os.path.join(root, basename)
                log.info("found %s", filePath)
                yield LocalPath(filePath)
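
Since find_files is a generator, a caller typically iterates over it; an illustrative sketch (the paths here are assumptions):

for path in find_files('/var/log', '*.log'):
    # Each yielded value is a py.path.local.LocalPath.
    print(path.basename, path.size())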
Example #19
    def test_incremental_fails_when_check_fails(
        capsys: CaptureFixture,
        schema: bool,
        archive_dir: LocalPath,
        cache_file: LocalPath,
        hash_file: Optional[LocalPath],
        post_hash: bool,
        verbose: bool,
    ):
        datafile: str = os.path.join(get_data_path(), "iris.csv")
        # Use a distinct name so the boolean "schema" fixture is not shadowed.
        schemadict = SortedDict({"0": "Name"})
        schemafile = os.path.join(archive_dir, syphon.schema.DEFAULT_FILE)

        if schema:
            syphon.init(schemadict, schemafile)
        assert syphon.archive(archive_dir, [datafile],
                              schema_filepath=schemafile if schema else None)
        assert not os.path.exists(os.path.join(get_data_path(), "#lock"))

        expected_frame = DataFrame(
            read_csv(datafile, dtype=str, index_col="Index"))
        expected_frame.sort_index(inplace=True)

        LocalPath(datafile).copy(cache_file)
        assert os.path.exists(cache_file)

        # "check" ought to fail when the hash file does not exist.
        assert not syphon.check(cache_file, hash_filepath=hash_file)
        # If "check" fails, then the incremental build fails.
        assert not syphon.build(
            cache_file,
            *get_data_files(archive_dir),
            hash_filepath=hash_file,
            incremental=True,
            overwrite=True,
            post_hash=post_hash,
            verbose=verbose,
        )
        assert_post_hash(False, cache_file, hash_filepath=hash_file)

        actual_frame = DataFrame(
            read_csv(cache_file, dtype=str, index_col="Index"))
        actual_frame.sort_index(inplace=True)

        assert_frame_equal(expected_frame, actual_frame, check_exact=True)
        assert_captured_outerr(capsys.readouterr(), verbose, False)
Example #20
def test_simple(tmpdir):
    target_id3_dir_raw = tmpdir / "id3"
    target_id3_dir = str(target_id3_dir_raw)
    LocalPath('tests/input').copy(target_id3_dir_raw)

    result = CliRunner().invoke(cli, ['list', target_id3_dir])
    empty_mp3_desc = u"""
╒════════════════╤═════════╤══════════╤═════════╕
│ Track number   │ Title   │ Artist   │ Album   │
╞════════════════╧═════════╧══════════╧═════════╡
│ artist name - song name.mp3                   │
├────────────────┬─────────┬──────────┬─────────┤
│                │         │          │         │
└────────────────┴─────────┴──────────┴─────────┘
"""
    assert empty_mp3_desc.strip() in result.output

    result = CliRunner().invoke(cli, [
        'id3', '-p',
        r'(?P<album>[^/]+)/(?P<track_num>[0-9]+)?(?P<artist>[^/]+) - (?P<title>[^(]+)\.',
        '--no-confirmation', target_id3_dir
    ])
    assert result.exit_code == 0

    result = CliRunner().invoke(cli, ['list', target_id3_dir])
    filled_mp3_desc = u"""
╒════════════════╤═══════════╤═════════════╤════════════╕
│ Track number   │ Title     │ Artist      │ Album      │
╞════════════════╧═══════════╧═════════════╧════════════╡
│ artist name - song name.mp3                           │
├────────────────┬───────────┬─────────────┬────────────┤
│                │ song name │ artist name │ album name │
└────────────────┴───────────┴─────────────┴────────────┘
"""
    assert filled_mp3_desc.strip() in result.output

    target_rename_dir_raw = tmpdir / "rename"

    result = CliRunner().invoke(cli, [
        'rename', '-o',
        str(target_rename_dir_raw), '-p', '$artist' + os.path.sep +
        '$track_num - $title - $album.mp3', '--no-confirmation', target_id3_dir
    ])
    assert result.exit_code == 0
    assert (target_rename_dir_raw / 'artist name' /
            ' - song name - album name.mp3').isfile()
Example #21
def test_check_false(
    capsys: CaptureFixture,
    tmpdir: LocalPath,
    relevant: int,
    failures: int,
    irrelevant: int,
    nonexistant: int,
    hash_file: Optional[LocalPath],
    verbose: bool,
):
    # Generate cache files and their respective hash entries.
    new_hashfile: LocalPath = make_hash_entries(tmpdir, relevant, irrelevant,
                                                nonexistant, hash_file)
    assert new_hashfile.size() > 0

    # Collect all generated cache files.
    cache_files: List[str] = glob(str(new_hashfile.dirpath("*.csv")))
    assert len(cache_files) == relevant

    # Randomly choose files to edit until we've changed the desired number of files.
    edited: Set[str] = set()
    while len(edited) != failures:
        # Choose from the set of files that have not been edited.
        chosen_file = random.choice(list(set(cache_files).difference(edited)))
        assert chosen_file not in edited
        LocalPath(chosen_file).write(rand_string())
        edited.add(chosen_file)

    for cache in cache_files:
        if cache in edited:
            assert not syphon.check(
                cache, hash_filepath=hash_file, verbose=verbose)
        else:
            assert syphon.check(cache,
                                hash_filepath=hash_file,
                                verbose=verbose)
        captured: CaptureResult = capsys.readouterr()
        assert_captured_outerr(captured, verbose, False)
        if verbose:
            assert_matches_outerr(captured,
                                  ["FAILED"] if cache in edited else ["OK"],
                                  [])
Example #22
def find_dirs_with_test_files_and_requirements_file(path, ignored_dir_names_regex):
    """
    Recursively list all directories under `path`, excluding those that
    match `ignored_dir_names_regex`.  For each remaining directory, check
    that it contains, at the same level, a `requirements.txt` file and
    files starting with `test` and ending with `.py`.

    :param path: pathlib.Path object
    :param ignored_dir_names_regex: compiled regex of directory paths to ignore
    :return: list of dicts of the format
        [{'dir': py._path.local.LocalPath object, 'envname': str}, ]
    """
    dirs = []

    for directory in path.glob('**/'):
        directory_str = str(directory)
        if not ignored_dir_names_regex.match(directory_str):
            dir_contains_requirements_file = False
            dir_contains_test_files = False

            # list only the files of directory
            filenames = (file.name for file in directory.iterdir() if file.is_file())

            for filename in filenames:

                if RE_REQUIREMENTS_FILE.match(filename):
                    dir_contains_requirements_file = True

                if RE_TEST_FILES.match(filename):
                    dir_contains_test_files = True

            if dir_contains_requirements_file and dir_contains_test_files:
                # tox requires a py._path.local.LocalPath instance.
                directory_local_path = LocalPath(str(directory))
                dirs.append(
                    dict(
                        dir=directory_local_path,
                        envname='{}-{}'.format(ENV_PREFIX, directory.parts[-1])
                    )
                )

    return dirs
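
An illustrative invocation, assuming module-level constants along the lines referenced above (the concrete patterns here are guesses, not from the source):

import re
from pathlib import Path

# Hypothetical stand-ins for the module constants used above.
ENV_PREFIX = 'integration'
RE_REQUIREMENTS_FILE = re.compile(r'^requirements\.txt$')
RE_TEST_FILES = re.compile(r'^test.*\.py$')

# Skip common tool directories (this pattern is an assumption).
ignored = re.compile(r'.*(\.git|\.tox|node_modules)')
for entry in find_dirs_with_test_files_and_requirements_file(Path('.'), ignored):
    print(entry['envname'], entry['dir'])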
Example #23
def mk_dummy_parent(tmp_path, filename, content=""):

    path = tmp_path / filename
    path.write_text(content)

    config = Mock(spec=Config)
    config.rootdir = str(tmp_path)
    config.getini.return_value = ["test_*.py", "*_test.py"]
    session = SimpleNamespace(
        config=config, isinitpath=lambda p: True, _initialpaths=[]
    )
    parent = SimpleNamespace(
        config=config,
        session=session,
        nodeid="dummy",
        fspath=LocalPath(path),
        _path=path,
    )

    return parent
Example #24
    def test_raises_valueerror_when_metadata_is_inconsistent(
        self,
        archive_meta_params: Tuple[str, str, SortedDict],
        archive_dir: LocalPath,
        import_dir: LocalPath,
    ):
        filename: str
        expectedfilename: str
        schema: SortedDict
        filename, expectedfilename, schema = archive_meta_params

        datafile = os.path.join(get_data_path(), filename + ".csv")
        bad_metafile = LocalPath(
            os.path.join(get_data_path(), filename + "-inconsistent.meta"))
        metafile = import_dir.join(filename + ".meta")
        bad_metafile.copy(metafile)
        schemafile = os.path.join(archive_dir, syphon.schema.DEFAULT_FILE)

        syphon.init(schema, schemafile)

        # Find the column that will be in the message.
        metaframe = DataFrame(read_csv(metafile, dtype=str))
        column: Optional[str] = None
        for column in metaframe.columns:
            if len(metaframe[column].drop_duplicates().values) > 1:
                break
        del metaframe

        assert column is not None
        with pytest.raises(ValueError, match=column):
            syphon.archive(
                archive_dir,
                [datafile],
                meta_files=[metafile],
                schema_filepath=schemafile,
                overwrite=True,
            )

        assert not os.path.exists(
            os.path.join(os.path.dirname(datafile), "#lock"))
Example #25
def is_alive(pid):
    """Return whether a process is running with the given PID."""
    return LocalPath('/proc').join(str(pid)).isdir()
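
A quick, hedged sanity check (Linux-only): the calling process certainly exists while the assertion runs.

import os

assert is_alive(os.getpid())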
"""Common helper functions and values for running tests with py.test."""
from os.path import dirname
from py._path.local import LocalPath

REPO_ROOT_PATH = LocalPath(dirname(dirname(dirname(__file__))))


def pytest_generate_tests(metafunc):
    """
    A test scenarios implementation for py.test, as found at
    http://docs.pytest.org/en/latest/example/parametrize.html
    #a-quick-port-of-testscenarios.  Picks up a ``scenarios`` class variable
    to parametrize all test function calls.
    """
    idlist = []
    argvalues = []
    for scenario in metafunc.cls.scenarios:
        idlist.append(scenario[0])
        items = scenario[1].items()
        argnames = [x[0] for x in items]
        argvalues.append(([x[1] for x in items]))
    metafunc.parametrize(argnames, argvalues, ids=idlist, scope="class")
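
For reference, a class consumed by this hook might look like the following (a sketch; the names are illustrative, not from the source). Each scenario is an (id, {argname: argvalue}) pair, and every test method of the class runs once per scenario:

class TestServerConfig:
    scenarios = [
        ('http', {'scheme': 'http', 'port': 80}),
        ('https', {'scheme': 'https', 'port': 443}),
    ]

    def test_port_matches_scheme(self, scheme, port):
        assert (scheme == 'https') == (port == 443)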


def slice_dict_from(dict_name, thelist):
    """
    Extract a Python dict definition in Python code such as the settings file
    read as a list of strings.  Return an empty list if not found.
    """
    try:
        start = thelist.index('%s = {' % dict_name)
    except ValueError:
        return []
Example #27
def datadir():
    """DATADIR as a LocalPath"""
    from openpyxl.tests.helper import DATADIR
    from py._path.local import LocalPath
    return LocalPath(DATADIR)
Example #28
def cwd():
    with LocalPath('/').as_cwd():
        yield
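
Presumably registered with @pytest.fixture; any test requesting it then runs with the filesystem root as its working directory, e.g. (illustrative):

import os

def test_runs_at_filesystem_root(cwd):
    # The fixture above switched the working directory for this test.
    assert os.getcwd() == os.sep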
Example #29
def process_state(pid):
    """Return a process' state, such as "stopped" or "running"."""
    status = LocalPath('/proc').join(str(pid), 'status').read()
    m = re.search(r'^State:\s+[A-Z] \(([a-z]+)\)$', status, re.MULTILINE)
    return m.group(1)
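
A hedged usage check against the current process (Linux-only): a process reading its own status is in the running state.

import os

assert process_state(os.getpid()) == 'running'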
Example #30
def test_cpu_project(tmpdir, datadir):
    """
    Test end to end functionality of creating, deploying and using a denzel project (CPU)
    :param tmpdir: tmpdir
    :type tmpdir: py._path.local.LocalPath
    """

    runner = CliRunner()

    with tmpdir.as_cwd():
        # -------- CPU version --------
        result = runner.invoke(cli.startproject, args=['test_project'])

        # Verify command executed
        assert result.exit_code == 0
        assert 'Successfully built' in result.output

        # Verify failing commands outside project dir
        assert all(
            runner.invoke(cmd).exit_code != 0
            for cmd in config.PROJECT_COMMANDS)

        project_dir = LocalPath(str(tmpdir) + '/test_project')

        with project_dir.as_cwd():
            # Launch project on occupied ports
            with occupy_port(cli_config.API_PORT):
                result = runner.invoke(cli.launch)

                assert result.exit_code != 0
                assert 'Error:' in result.output

        # Copy source files to test project directory
        shutil.copy(src='{}/info.txt'.format(datadir),
                    dst=project_dir + '/app/assets/')
        shutil.copy(src='{}/requirements.txt'.format(datadir),
                    dst=project_dir + '/requirements.txt')
        shutil.copy(src='{}/iris_svc.pkl'.format(datadir),
                    dst=project_dir + '/app/assets/')
        shutil.copy(src='{}/pipeline.py'.format(datadir),
                    dst=project_dir + '/app/logic/')
        try:

            with project_dir.as_cwd():

                # Launch project successfully
                result = runner.invoke(cli.launch)

                assert result.exit_code == 0

                # Wait till all are up
                start_time = time.time()

                while True:
                    result = runner.invoke(cli.status)
                    assert result.exit_code == 0

                    if str(result.output).count('UP') < 5:
                        time.sleep(2)
                    else:
                        break  # All is up

                    if time.time() - start_time > 240:
                        raise TimeoutError('Too long installation phase')

                # Check info endpoint
                response = requests.get('http://localhost:8000/info')

                assert response.status_code == 200
                assert 'For prediction' in response.text

                # Check prediction endpoint - synchronous
                assert_sync()

                # Check prediction endpoint - asynchronous
                result = runner.invoke(cli.response, args=['--async'])
                assert result.exit_code == 0
                assert_async()

                # Revert to synchronous again and check endpoint again
                result = runner.invoke(cli.response,
                                       args=['--sync', '--timeout', '5'])
                assert result.exit_code == 0
                assert_sync()

                # -------- Check updateenvreqs --------
                containers_names = utils.get_containers_names()
                client = docker.from_env()
                denzel_container = client.containers.get(
                    containers_names['denzel'])

                # Assert htop non-existent
                status_code, output = denzel_container.exec_run('htop')
                assert b'OCI runtime exec failed' in output

                # updateenvreqs
                shutil.copy(src='{}/requirements.sh'.format(datadir),
                            dst=project_dir + '/requirements.sh')
                result = runner.invoke(cli.updateosreqs)
                assert result.exit_code == 0

                # Wait till all are up
                start_time = time.time()

                while True:
                    result = runner.invoke(cli.status)
                    assert result.exit_code == 0

                    if str(result.output).count('UP') < 5:
                        time.sleep(2)
                    else:
                        break  # All is up

                    if time.time() - start_time > 180:
                        raise TimeoutError('Too long installation phase')

                # Assert htop was installed
                status_code, output = denzel_container.exec_run(
                    'htop --version')
                assert status_code == 0
                assert b'Released under the GNU GPL' in output

        finally:
            with project_dir.as_cwd():
                # Shut the project down.
                result = runner.invoke(cli.shutdown)
                assert result.exit_code == 0