def test_repository_filters_with_absolute_relative_path(tmp_path):
    """.lightningignore parsing parses paths starting with / correctly."""
    lightningignore = """
    /ignore_file/test.txt

    /ignore_dir
    """
    ignore_spec = tmp_path / ".lightningignore"
    ignore_spec.write_text(lightningignore)

    # Seed the directory with a single tracked file.
    (tmp_path / "test.txt").write_text(str(uuid.uuid4()))

    # Snapshot the version before the ignored paths exist.
    repo_before = LocalSourceCodeDir(path=Path(tmp_path))
    version_before = repo_before.version

    # Only .lightningignore and test.txt contribute to the hash.
    assert len(repo_before._non_ignored_files) == 2

    # Create the paths that the ignore rules should exclude.
    for ignored in ("ignore_file", "ignore_dir"):
        (tmp_path / ignored).mkdir()
        (tmp_path / ignored / "test.txt").write_text(str(uuid.uuid4()))

    # Re-scan: the ignored files must not change the version.
    repo_after = LocalSourceCodeDir(path=Path(tmp_path))
    version_after = repo_after.version

    # Still only the same two files in the hash.
    assert len(repo_after._non_ignored_files) == 2

    assert version_before == version_after
def test_repository_lightningignore(tmp_path):
    """LocalRepository.version uses the assumed checksum correctly."""
    # write .lightningignore file
    lightningignore = """
    # ignore files in this dir
    ignore/

    """
    (tmp_path / ".lightningignore").write_text(lightningignore)

    # A single tracked file with random content defines the baseline version.
    (tmp_path / "test.txt").write_text(str(uuid.uuid4()))

    baseline_repo = LocalSourceCodeDir(path=Path(tmp_path))
    baseline_version = baseline_repo.version

    # Add content under the ignored directory.
    ignored_dir = tmp_path / "ignore"
    ignored_dir.mkdir()
    (ignored_dir / "test.txt").write_text(str(uuid.uuid4()))

    # A fresh scan must report the same version: ignored files don't count.
    rescanned_repo = LocalSourceCodeDir(path=Path(tmp_path))
    rescanned_version = rescanned_repo.version

    assert baseline_version == rescanned_version
def test_repository_package(tmp_path, monkeypatch):
    """LocalRepository.package() creates a package from the local dir.

    The produced tarball must be named after the repository version.
    """
    cache_path = Path(tmp_path)
    source_path = cache_path / "nested"
    source_path.mkdir(parents=True, exist_ok=True)
    (source_path / "test.txt").write_text("test")

    # set cache location to temp dir
    monkeypatch.setattr(LocalSourceCodeDir, "cache_location", cache_path)

    repository = LocalSourceCodeDir(path=source_path)
    repository.package()

    # Collect produced tarballs first: the original loop was vacuous and
    # passed even when package() produced no archive at all.
    tarballs = [
        file for file in cache_path.glob("**/*")
        if file.is_file() and file.name.endswith(".tar.gz")
    ]
    assert tarballs, "package() did not produce a .tar.gz archive"
    for file in tarballs:
        assert file.name == f"{repository.version}.tar.gz"
def test_repository_checksum(tmp_path):
    """LocalRepository.checksum() generates a hash of local dir."""
    repository = LocalSourceCodeDir(path=Path(tmp_path))

    test_path = tmp_path / "test.txt"
    original_content = str(uuid.uuid4())
    test_path.write_text(original_content)
    checksum_first = repository.version

    # Re-writing identical contents yields an identical checksum.
    repository = LocalSourceCodeDir(path=Path(tmp_path))
    test_path.write_text(original_content)
    assert repository.version == checksum_first

    # Changing the contents changes the checksum.
    repository = LocalSourceCodeDir(path=Path(tmp_path))
    test_path.write_text(str(uuid.uuid4()))
    checksum_changed = repository.version

    assert checksum_first != checksum_changed
# Example no. 5 — snippet separator left over from extraction
    def dispatch(
        self,
        on_before_run: Optional[Callable] = None,
        name: str = "",
        **kwargs: Any,
    ):
        """Method to dispatch and run the :class:`~lightning_app.core.app.LightningApp` in the cloud.

        Packages the local source directory, registers (or reuses) the app and
        creates a new release with the backend, uploads the source code, and
        starts (or restarts) a single instance running that release.

        Args:
            on_before_run: Optional callback invoked with the created/updated
                instance just before this method returns.
            name: If non-empty, overrides the app name loaded from the config file.
            **kwargs: Extra flags; only ``no_cache`` is read here, which disables
                the dependency cache key.

        Raises:
            ValueError: If ``self.entrypoint_file`` was never set.
            RuntimeError: If the release has no source-upload URL, an existing
                instance cannot be stopped in time, or the instance ends up in
                the FAILED state.
        """
        # not user facing error ideally - this should never happen in normal user workflow
        if not self.entrypoint_file:
            raise ValueError(
                "Entrypoint file not provided. Did you forget to "
                "initialize the Runtime object with `entrypoint_file` argument?"
            )

        # Determine the root of the project: Start at the entrypoint_file and look for nearby Lightning config files,
        # going up the directory structure. The root of the project is where the Lightning config file is located.

        # TODO: verify lightning version
        # _verify_lightning_version()
        config_file = find_config_file(self.entrypoint_file)
        app_config = AppConfig.load_from_file(
            config_file) if config_file else AppConfig()
        root = config_file.parent if config_file else Path(
            self.entrypoint_file).absolute().parent
        # cleanup_handle (if truthy) undoes the wheel/requirements staging; it is
        # invoked at the very end of this method.
        cleanup_handle = _prepare_lightning_wheels_and_requirements(root)
        repo = LocalSourceCodeDir(path=root)
        self._check_uploaded_folder(root, repo)
        requirements_file = root / "requirements.txt"
        # The entry point file needs to be relative to the root of the uploaded source file directory,
        # because the backend will invoke the lightning commands relative said source directory
        app_entrypoint_file = Path(
            self.entrypoint_file).absolute().relative_to(root)

        if name:
            # Override the name if provided by the CLI
            app_config.name = name

        app_config.save_to_dir(root)

        print(f"The name of the app is: {app_config.name}")

        # Build one V1Work spec per work of every flow: build commands,
        # pip requirements, compute config, and a randomly named network port.
        work_reqs: List[V1Work] = []
        # Environment variables forwarded to the app instance spec below.
        v1_env_vars = [
            V1EnvVar(name=k, value=v) for k, v in self.env_vars.items()
        ]
        for flow in self.app.flows:
            for work in flow.works(recurse=False):
                work_requirements = "\n".join(
                    work.cloud_build_config.requirements)
                build_spec = V1BuildSpec(
                    commands=work.cloud_build_config.build_commands(),
                    python_dependencies=V1PythonDependencyInfo(
                        package_manager=V1PackageManager.PIP,
                        packages=work_requirements),
                    image=work.cloud_build_config.image,
                )
                user_compute_config = V1UserRequestedComputeConfig(
                    name=work.cloud_compute.name,
                    count=1,
                    disk_size=work.cloud_compute.disk_size,
                    preemptible=work.cloud_compute.preemptible,
                    shm_size=work.cloud_compute.shm_size,
                )
                # 5 random lowercase letters to name the network config entry.
                random_name = "".join(
                    random.choice(string.ascii_lowercase) for _ in range(5))
                spec = V1LightningworkSpec(
                    build_spec=build_spec,
                    user_requested_compute_config=user_compute_config,
                    network_config=[
                        V1NetworkConfig(name=random_name, port=work.port)
                    ],
                )
                work_reqs.append(V1Work(name=work.name, spec=spec))

        # We need to collect a spec for each flow that contains a frontend so that the backend knows
        # for which flows it needs to start servers by invoking the cli (see the serve_frontend() method below)
        frontend_specs: List[V1Flowserver] = []
        for flow_name in self.app.frontends.keys():
            frontend_spec = V1Flowserver(name=flow_name)
            frontend_specs.append(frontend_spec)

        app_spec = V1LightningappInstanceSpec(
            app_entrypoint_file=str(app_entrypoint_file),
            enable_app_server=self.start_server,
            flow_servers=frontend_specs,
            desired_state=V1LightningappInstanceState.RUNNING,
            env=v1_env_vars,
        )
        # if requirements file at the root of the repository is present,
        # we pass just the file name to the backend, so backend can find it in the relative path
        if requirements_file.is_file():
            app_spec.image_spec = Gridv1ImageSpec(
                dependency_file_info=V1DependencyFileInfo(
                    package_manager=V1PackageManager.PIP,
                    path="requirements.txt"))
            if not DISABLE_DEPENDENCY_CACHE and not kwargs.get("no_cache"):
                # hash used for caching the dependencies
                app_spec.dependency_cache_key = get_hash(requirements_file)
        # we'll get the default project (quite similar to Github Organization) from the backend
        project = _get_project(self.backend.client)

        try:
            # Reuse the app with this name in the project, or create one.
            list_apps_resp = self.backend.client.lightningapp_v2_service_list_lightningapps_v2(
                project.project_id, name=app_config.name)
            if list_apps_resp.lightningapps:
                # There can be only one app with unique project_id<>name pair
                lightning_app = list_apps_resp.lightningapps[0]
            else:
                app_body = Body7(name=app_config.name)
                lightning_app = self.backend.client.lightningapp_v2_service_create_lightningapp_v2(
                    project.project_id, app_body)

            # Always create a fresh release carrying the specs built above.
            release_body = Body8(
                app_entrypoint_file=app_spec.app_entrypoint_file,
                enable_app_server=app_spec.enable_app_server,
                flow_servers=app_spec.flow_servers,
                image_spec=app_spec.image_spec,
                works=[
                    V1Work(name=work_req.name, spec=work_req.spec)
                    for work_req in work_reqs
                ],
                local_source=True,
                dependency_cache_key=app_spec.dependency_cache_key,
            )
            lightning_app_release = self.backend.client.lightningapp_v2_service_create_lightningapp_release(
                project.project_id, lightning_app.id, release_body)

            if lightning_app_release.source_upload_url == "":
                raise RuntimeError("The source upload url is empty.")

            # Package the source tree and push it to the presigned upload URL.
            repo.package()
            repo.upload(url=lightning_app_release.source_upload_url)

            # right now we only allow a single instance of the app
            find_instances_resp = self.backend.client.lightningapp_instance_service_list_lightningapp_instances(
                project.project_id, app_id=lightning_app.id)
            if find_instances_resp.lightningapps:
                existing_instance = find_instances_resp.lightningapps[0]
                if existing_instance.status.phase != V1LightningappInstanceState.STOPPED:
                    # TODO(yurij): Implement release switching in the UI and remove this
                    # We can only switch release of the stopped instance
                    existing_instance = self.backend.client.lightningapp_instance_service_update_lightningapp_instance(
                        project_id=project.project_id,
                        id=existing_instance.id,
                        body=Body3(spec=V1LightningappInstanceSpec(
                            desired_state=V1LightningappInstanceState.STOPPED)
                                   ),
                    )
                    # wait for the instance to stop for up to 150 seconds
                    for _ in range(150):
                        existing_instance = self.backend.client.lightningapp_instance_service_get_lightningapp_instance(
                            project_id=project.project_id,
                            id=existing_instance.id)
                        if existing_instance.status.phase == V1LightningappInstanceState.STOPPED:
                            break
                        time.sleep(1)
                    if existing_instance.status.phase != V1LightningappInstanceState.STOPPED:
                        raise RuntimeError(
                            "Failed to stop the existing instance.")

                # Point the (now stopped) instance at the new release...
                lightning_app_instance = (
                    self.backend.client.
                    lightningapp_instance_service_update_lightningapp_instance_release(
                        project_id=project.project_id,
                        id=existing_instance.id,
                        body=Body4(release_id=lightning_app_release.id),
                    ))

                # ...and request it to start running again.
                self.backend.client.lightningapp_instance_service_update_lightningapp_instance(
                    project_id=project.project_id,
                    id=existing_instance.id,
                    body=Body3(spec=V1LightningappInstanceSpec(
                        desired_state=V1LightningappInstanceState.RUNNING,
                        env=v1_env_vars)),
                )
            else:
                # No prior instance: create one directly from the new release.
                lightning_app_instance = (
                    self.backend.client.
                    lightningapp_v2_service_create_lightningapp_release_instance(
                        project.project_id,
                        lightning_app.id,
                        lightning_app_release.id,
                        Body9(
                            desired_state=V1LightningappInstanceState.RUNNING,
                            name=lightning_app.name,
                            env=v1_env_vars),
                    ))
        except ApiException as e:
            # Backend rejected one of the calls: surface the body and abort.
            logger.error(e.body)
            sys.exit(1)

        if on_before_run:
            on_before_run(lightning_app_instance)

        if lightning_app_instance.status.phase == V1LightningappInstanceState.FAILED:
            raise RuntimeError(
                "Failed to create the application. Cannot upload the source code."
            )

        # Undo the temporary wheel/requirements staging done at the top.
        if cleanup_handle:
            cleanup_handle()
def test_repository_lightningignore_unpackage(tmp_path, monkeypatch):
    """.lightningignore behaves similarly to the gitignore standard.

    Packages a directory tree with a rich ignore file, extracts the tarball,
    and asserts inclusion/exclusion of every entry.
    """

    lorem_ipsum = "Lorem ipsum dolor sit amet, consectetur adipiscing elit."

    cache_path = tmp_path / "cache"
    monkeypatch.setattr(LocalSourceCodeDir, "cache_location", cache_path)

    source_path = tmp_path / "source"
    source_path.mkdir()

    # set cache location to temp dir

    lightningignore = """
    # Ignore on all levels
    *.pyc
    *__pycache__/
    build/
    .env
    # Ignore wildcard on one level
    ./*.txt
    /*.md
    ./one-level/*.txt
    /one-level/*.md
    # Ignore only relative
    ./downloads
    /relative_downloads
    # nested
    /nested//level/
    /nested/level/
    """
    (source_path / ".lightningignore").write_text(lightningignore)

    # Dir structure
    (source_path / "include.py").write_text(lorem_ipsum)
    (source_path / "exclude.pyc").write_text(lorem_ipsum)
    (source_path / "__pycache__").mkdir()
    (source_path / "__pycache__" / "exclude.py").write_text(
        lorem_ipsum
    )  # Even tho it's .py it's in excluded __pycache__ directory
    (source_path / "__pycache__" / "exclude.pyc").write_text(
        lorem_ipsum
    )  # Even tho it's .py it's in excluded __pycache__ directory
    (source_path / "build.py").write_text(
        lorem_ipsum)  # Common prefix with excluded build but it's not it
    (source_path / "builds"
     ).mkdir()  # Common prefix with excluded build but it's not excluded
    (source_path / "builds" / "include.py").write_text(lorem_ipsum)
    (source_path / "builds" / "__pycache__").mkdir()  # Recursively excluded
    (source_path / "builds" / "__pycache__" /
     "exclude.py").write_text(lorem_ipsum)
    (source_path / "build").mkdir()  # Recursively excluded
    (source_path / "build" / "exclude.db").write_text(lorem_ipsum)
    (source_path / ".env").write_text(
        lorem_ipsum)  # No issues with handling hidden (.dot) files
    (source_path / "downloads").mkdir()  # exclude
    (source_path / "downloads" / "something.jpeg").write_text(lorem_ipsum)
    (source_path / "relative_downloads").mkdir()  # exclude
    (source_path / "relative_downloads" /
     "something.jpeg").write_text(lorem_ipsum)
    (source_path / "include").mkdir()  # include
    (source_path / "include" / "exclude.pyc").write_text(
        lorem_ipsum)  # exclude because of *.pyc rule
    (source_path / "include" / "include.py").write_text(lorem_ipsum)  # include
    (source_path / "include" / "downloads"
     ).mkdir()  # include because it was excluded only relative to root
    (source_path / "include" / "downloads" /
     "something.jpeg").write_text(lorem_ipsum)
    (source_path / "include" / "relative_downloads"
     ).mkdir()  # include because it was excluded only relative to root
    (source_path / "include" / "relative_downloads" /
     "something.jpeg").write_text(lorem_ipsum)
    (source_path / "exclude.txt").write_text(lorem_ipsum)
    (source_path / "exclude.md").write_text(lorem_ipsum)
    (source_path / "one-level").mkdir()
    (source_path / "one-level" / "exclude.txt").write_text(lorem_ipsum)
    (source_path / "one-level" / "exclude.md").write_text(lorem_ipsum)
    (source_path / "one-level" / "include.py").write_text(lorem_ipsum)
    (source_path / "nested").mkdir()
    (source_path / "nested" / "include.py").write_text(lorem_ipsum)
    (source_path / "nested" / "level").mkdir()
    (source_path / "nested" / "level" / "exclude.py").write_text(lorem_ipsum)

    # create repo object
    repository = LocalSourceCodeDir(path=source_path)
    repository.package()

    unpackage_path = tmp_path / "unpackage"

    # NOTE(review): extractall without a filter trusts the archive; acceptable
    # here since the archive was produced by this very test.
    with tarfile.open(repository.package_path) as f:
        f.extractall(unpackage_path)

    assert (unpackage_path / "include.py").exists()
    assert not (unpackage_path / "exclude.pyc").exists()  # Excluded by *.pyc
    assert not (unpackage_path / "__pycache__").exists()
    assert not (unpackage_path / "__pycache__" / "exclude.py").exists(
    )  # Even tho it's .py it's in excluded __pycache__ directory
    assert not (unpackage_path / "__pycache__" / "exclude.pyc").exists(
    )  # Even tho it's .py it's in excluded __pycache__ directory
    assert (unpackage_path / "build.py"
            ).exists()  # Common prefix with excluded build but it's not it
    assert (unpackage_path / "builds" / "include.py").exists()
    assert not (unpackage_path / "builds" /
                "__pycache__").exists()  # Recursively excluded
    assert not (unpackage_path / "builds" / "__pycache__" /
                "exclude.py").exists()
    assert not (unpackage_path / "build").exists()  # Recursively excluded
    assert not (unpackage_path / "build" / "exclude.db").exists()
    assert not (unpackage_path /
                ".env").exists()  # No issues with handling hidden (.dot) files
    # Fixed: these two previously called .mkdir(), which returns None (so the
    # `assert not` was vacuous) and actively created the directory under test.
    assert not (unpackage_path / "downloads").exists()  # exclude
    assert not (unpackage_path / "downloads" / "something.jpeg").exists()
    assert not (unpackage_path / "relative_downloads").exists()  # exclude
    assert not (unpackage_path / "relative_downloads" /
                "something.jpeg").exists()
    assert not (unpackage_path / "include" /
                "exclude.pyc").exists()  # exclude because of *.pyc rule
    assert (unpackage_path / "include" / "include.py").exists()  # include
    assert (unpackage_path / "include" / "downloads" / "something.jpeg"
            ).exists()  # include because it was excluded only relative to root
    assert (unpackage_path / "include" / "relative_downloads" /
            "something.jpeg"
            ).exists()  # include because it was excluded only relative to root
    assert not (unpackage_path / "exclude.txt").exists()
    assert not (unpackage_path / "exclude.md").exists()
    assert not (unpackage_path / "one-level" / "exclude.txt").exists()
    assert not (unpackage_path / "one-level" / "exclude.md").exists()
    assert (unpackage_path / "one-level" / "include.py").exists()
    assert (unpackage_path / "nested" / "include.py").exists()
    assert not (unpackage_path / "nested" / "level" / "exclude.py").exists()
def test_repository_lightningignore_supports_different_patterns(tmp_path):
    """.lightningignore parsing supports different patterns."""
    # write .lightningignore file
    # default github python .gitignore
    lightningignore = """
    # ignore files in this dir
    ignore/

    # Byte-compiled / optimized / DLL files
    __pycache__/
    *.py[cod]
    *$py.class

    # C extensions
    *.so

    # Distribution / packaging
    .Python
    build/
    develop-eggs/
    dist/
    downloads/
    eggs/
    .eggs/
    lib/
    lib64/
    parts/
    sdist/
    var/
    wheels/
    *.egg-info/
    .installed.cfg
    *.egg
    MANIFEST

    # PyInstaller
    #  Usually these files are written by a python script from a template
    #  before PyInstaller builds the exe, so as to inject date/other infos into it.
    *.manifest
    *.spec

    # Installer logs
    pip-log.txt
    pip-delete-this-directory.txt

    # Unit test / coverage reports
    htmlcov/
    .tox/
    .coverage
    .coverage.*
    .cache
    nosetests.xml
    coverage.xml
    *.cover
    .hypothesis/
    .pytest_cache/

    # Translations
    *.mo
    *.pot

    # Django stuff:
    *.log
    local_settings.py
    db.sqlite3

    # Flask stuff:
    instance/
    .webassets-cache

    # Scrapy stuff:
    .scrapy

    # Sphinx documentation
    docs/_build/

    # PyBuilder
    target/

    # Jupyter Notebook
    .ipynb_checkpoints

    # pyenv
    .python-version

    # celery beat schedule file
    celerybeat-schedule

    # SageMath parsed files
    *.sage.py

    # Environments
    .env
    .env.docker
    .venv
    env/
    venv/
    ENV/
    env.bak/
    venv.bak/

    # Spyder project settings
    .spyderproject
    .spyproject

    # Rope project settings
    .ropeproject

    # mkdocs documentation
    /site

    # mypy
    .mypy_cache/

    # VS Code files
    .vscode/

    # UI files
    node_modules/

    # Data files
    models/
    models/*
    !grid/openapi/models
    postgresql_data/
    redis_data/

    # Secrets folders
    secrets/

    # Built UI
    ui/

    # Ignores Grid Runner
    vendor/
    ignore_test.py

    # Ignore cov report
    *.xml

    """
    (tmp_path / ".lightningignore").write_text(lightningignore)

    # Baseline: a single tracked file with random content defines the version.
    (tmp_path / "test.txt").write_text(str(uuid.uuid4()))

    baseline_repo = LocalSourceCodeDir(path=Path(tmp_path))
    baseline_version = baseline_repo.version

    # Content inside the ignored directory must not affect the version.
    ignored_dir = tmp_path / "ignore"
    ignored_dir.mkdir()
    (ignored_dir / "test.txt").write_text(str(uuid.uuid4()))

    rescanned_repo = LocalSourceCodeDir(path=Path(tmp_path))
    rescanned_version = rescanned_repo.version

    assert baseline_version == rescanned_version