Example #1
    def up(self) -> None:
        """Ensures that Airflow is running."""
        if self.is_running:
            logger.info("Airflow is already running.")
            self._log_webserver_credentials()
            return

        if not fileio.file_exists(self.dags_directory):
            fileio.create_dir_recursive_if_not_exists(self.dags_directory)

        from airflow.cli.commands.standalone_command import StandaloneCommand

        command = StandaloneCommand()
        # Run the daemon with a working directory inside the current
        # zenml repo so the same repo will be used to run the DAGs
        daemon.run_as_daemon(
            command.run,
            pid_file=self.pid_file,
            log_file=self.log_file,
            working_directory=fileio.get_zenml_dir(),
        )

        while not self.is_running:
            # Wait until the daemon has started all the relevant airflow processes
            time.sleep(0.1)

        self._log_webserver_credentials()
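The polling loop above waits on an `is_running` property that is not shown in
this snippet. A minimal sketch of such a check, assuming the daemon records its
process ID in the PID file (the helper name is hypothetical and the check is
POSIX-only):

import os


def pid_file_indicates_running(pid_file: str) -> bool:
    """Return True if the PID stored in `pid_file` belongs to a live process."""
    if not os.path.exists(pid_file):
        return False
    try:
        with open(pid_file) as f:
            pid = int(f.read().strip())
    except (OSError, ValueError):
        return False
    try:
        # Signal 0 does not kill the process; it only checks that it exists
        # and that we are allowed to signal it.
        os.kill(pid, 0)
    except OSError:
        return False
    return True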
Example #2
def clean(git_examples_handler: GitExamplesHandler) -> None:
    """Deletes the ZenML examples directory from your current working
    directory."""
    examples_directory = os.path.join(os.getcwd(), "zenml_examples")
    if (fileio.file_exists(examples_directory)
            and fileio.is_dir(examples_directory) and confirmation(
                "Do you wish to delete the ZenML examples directory? \n"
                f"{examples_directory}")):
        git_examples_handler.clean_current_examples()
        declare(
            "ZenML examples directory was deleted from your current working "
            "directory.")
    elif not fileio.file_exists(examples_directory) and not fileio.is_dir(
            examples_directory):
        logger.error(f"Unable to delete the ZenML examples directory - "
                     f"{examples_directory} - "
                     "as it was not found in your current working directory.")
Example #3
def pull(
    git_examples_handler: GitExamplesHandler,
    example_name: str,
    force: bool,
    version: str,
) -> None:
    """Pull examples straight into your current working directory.
    Add the flag --force or -f to redownload all the examples afresh.
    Use the flag --version or -v and the version number to specify
    which version of ZenML you wish to use for the examples."""
    git_examples_handler.pull(force=force, version=version)
    destination_dir = os.path.join(os.getcwd(), "zenml_examples")
    fileio.create_dir_if_not_exists(destination_dir)

    examples = (git_examples_handler.examples if not example_name else [
        Example(
            example_name,
            Path(
                os.path.join(
                    git_examples_handler.examples_repo.examples_dir,
                    example_name,
                )),
        )
    ])

    for example in examples:
        if not fileio.file_exists(str(example.path)):
            error(
                f"Example {example.name} does not exist! Available examples: "
                f"{[e.name for e in git_examples_handler.examples]}")
            return

        example_destination_dir = os.path.join(destination_dir, example.name)
        if fileio.file_exists(example_destination_dir):
            if confirmation(f"Example {example.name} is already pulled. "
                            f"Do you wish to overwrite the directory?"):
                fileio.rm_dir(example_destination_dir)
            else:
                warning(f"Example {example.name} not overwritten.")
                continue

        declare(f"Pulling example {example.name}...")
        git_examples_handler.copy_example(example, example_destination_dir)

        declare(f"Example pulled in directory: {example_destination_dir}")
Example #4
def read_json(file_path: str) -> Any:
    """Read JSON on file path and returns contents as dict.

    Args:
        file_path: Path to JSON file.
    """
    if fileio.file_exists(file_path):
        contents = fileio.read_file_contents_as_string(file_path)
        return json.loads(contents)
    else:
        raise FileNotFoundError(f"{file_path} does not exist.")
Example #5
    def __init__(self, **data: Any):
        """We persist the attributes in the config file. For the global
        config, we want to persist the data as soon as it is initialized for
        the first time."""
        super().__init__(**data)

        # At this point, if the serialization file does not exist we should
        # create it and dump our data.
        f = self.get_serialization_full_path()
        if not fileio.file_exists(str(f)):
            self._dump()
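A minimal sketch of what `_dump` could look like, assuming the config object is
a pydantic model; the real method may instead serialize through ZenML's own
fileio/yaml utilities:

    def _dump(self) -> None:
        """Write the current attribute values to the serialization file."""
        path = self.get_serialization_full_path()
        with open(path, "w") as f:
            # pydantic v1 `.json()` forwards keyword args to `json.dumps`.
            f.write(self.json(indent=2))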
Example #6
    def readme_content(self) -> str:
        """Returns the readme content associated with a particular example."""
        readme_file = os.path.join(self.path, "README.md")
        try:
            with open(readme_file) as readme:
                readme_content = readme.read()
            return readme_content
        except FileNotFoundError:
            if fileio.file_exists(str(self.path)) and fileio.is_dir(
                    str(self.path)):
                raise ValueError(f"No README.md file found in {self.path}")
            else:
                raise FileNotFoundError(
                    f"Example {self.name} is not one of the available options."
                    f"\nTo list all available examples, type: `zenml example "
                    f"list`")
Example #7
def read_yaml(file_path: str) -> Any:
    """Read YAML on file path and returns contents as dict.

    Args:
        file_path: Path to YAML file.

    Returns:
        Contents of the file in a dict.

    Raises:
        FileNotFoundError: If the file does not exist.
    """
    if fileio.file_exists(file_path):
        contents = fileio.read_file_contents_as_string(file_path)
        return yaml.load(contents, Loader=yaml.FullLoader)
    else:
        raise FileNotFoundError(f"{file_path} does not exist.")
Example #8
    def _save_backup_file_if_required(self) -> None:
        """Saves a backup of the config file if the schema changed."""
        if self._superfluous_options:
            logger.warning(
                "Found superfluous configuration values for class `%s`: %s",
                self.__class__.__name__,
                set(self._superfluous_options),
            )
            config_path = self.get_serialization_full_path()
            if fileio.file_exists(config_path):
                backup_path = config_path + ".backup"
                fileio.copy(config_path, backup_path, overwrite=True)
                logger.warning("Saving backup configuration to '%s'.",
                               backup_path)

            # save the updated file without the extra options
            self.update()
Example #9
    def _log_webserver_credentials(self):
        """Logs URL and credentials to login to the airflow webserver.

        Raises:
            FileNotFoundError: If the password file does not exist.
        """
        if fileio.file_exists(self.password_file):
            with open(self.password_file) as file:
                password = file.read().strip()
        else:
            raise FileNotFoundError(
                f"Can't find password file '{self.password_file}'")
        logger.info(
            "To inspect your DAGs, login to http://0.0.0.0:8080 "
            "with username: admin password: %s",
            password,
        )
Example #10
    def json_config_settings_source(settings: BaseSettings) -> Dict[str, Any]:
        """
        A simple settings source that loads variables from a JSON file
        in the configuration directory.

        The file is read with `yaml_utils.read_json`, so its contents must be
        valid JSON.

        Args:
            settings (BaseSettings): BaseSettings from pydantic.

        Returns:
            A dict with all configuration, empty dict if config not found.
        """
        full_path = Path(config_dir) / config_name
        logger.debug(f"Parsing file: {full_path}")
        if fileio.file_exists(str(full_path)):
            return cast(Dict[str, Any], yaml_utils.read_json(str(full_path)))
        return {}
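For pydantic v1 `BaseSettings`, a custom source like the one above is typically
wired in via `Config.customise_sources`; a hedged sketch, assuming
`json_config_settings_source` is in scope (the settings class and field are
illustrative):

from pydantic import BaseSettings


class MySettings(BaseSettings):
    some_option: str = "default"

    class Config:
        @classmethod
        def customise_sources(cls, init_settings, env_settings,
                              file_secret_settings):
            # Slot the JSON-backed source between init kwargs and env vars.
            return (
                init_settings,
                json_config_settings_source,
                env_settings,
                file_secret_settings,
            )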
Example #11
    def _copy_to_dag_directory_if_necessary(self, dag_filepath: str):
        """Copies the DAG module to the airflow DAGs directory if it's not
        already located there.

        Args:
            dag_filepath: Path to the file in which the DAG is defined.
        """
        dags_directory = fileio.resolve_relative_path(self.dags_directory)

        if dags_directory == os.path.dirname(dag_filepath):
            logger.debug("File is already in airflow DAGs directory.")
        else:
            logger.debug("Copying dag file '%s' to DAGs directory.",
                         dag_filepath)
            destination_path = os.path.join(dags_directory,
                                            os.path.basename(dag_filepath))
            if fileio.file_exists(destination_path):
                logger.info(
                    "File '%s' already exists, overwriting with new DAG file",
                    destination_path,
                )
            fileio.copy(dag_filepath, destination_path, overwrite=True)
Example #12
    def _create_serialization_file_if_not_exists(self) -> None:
        """Creates the serialization file if it does not exist."""
        f = self.get_serialization_full_path()
        if not fileio.file_exists(str(f)):
            fileio.create_file_if_not_exists(str(f))
Example #13
def test_global_config_file_creation():
    """A simple test to check whether the global config is created."""
    GlobalConfig()

    # Raw config should now exist
    assert fileio.file_exists(os.path.join(APP_DIR, GLOBAL_CONFIG_NAME))