def task_config(self):
    """Look up this command's task entry in the workflow config.

    Raises:
        CommandError: when ``self.task_name`` is not defined under
            the config's ``tasks`` section.
    """
    workflow_config = get_config(self.workflow)
    try:
        return workflow_config["tasks"][self.task_name]
    except KeyError:
        raise CommandError(f"task name={self.task_name} not found")
def task_config(self):
    """Fetch the entry for ``self.task_name`` from the parsed config.

    Raises:
        CommandError: when no task with that name exists in the config.
    """
    cfg = get_config()
    try:
        return cfg['tasks'][self.task_name]
    except KeyError:
        raise CommandError(f'task name={self.task_name} not found')
def profile_files(self) -> dict:
    """Return the profile data found in the dc.yml file.

    Raises:
        NoSuchProfile: when ``self.args.profile`` names a profile that
            is not present in the config's ``profiles`` section.
    """
    profile_name = self.args.profile
    try:
        return get_config()['profiles'][profile_name]
    except KeyError:
        raise NoSuchProfile(f'profile={profile_name}')
def profile_files(self) -> dict:
    """Return the profile data found in the dc.yml file.

    Falls back to an empty dict when no config exists, and to the
    default compose file list when no profile was given.
    NOTE(review): despite the ``-> dict`` annotation, the no-profile
    branch returns a list — confirm against callers before tightening.

    Raises:
        NoSuchProfile: when the named profile is not in the config.
    """
    config = get_config(self.workflow)
    if not config:
        return {}

    profile_name = self.workflow.args.profile
    if profile_name is None:
        # no profile selected; use the stock compose file
        return ["docker-compose.yml"]

    try:
        return config["profiles"][profile_name]
    except KeyError:
        raise NoSuchProfile(f"profile={profile_name}")
def is_dirty_working_copy_okay(self, exc: Exception) -> bool:
    """
    Checks to see if the project's compose-flow.yml allows for the env
    to use a dirty working copy

    To configure an environment to allow a dirty working copy, add the
    following to the compose-flow.yml

    ```
    options:
      env_name:
        dirty_working_copy_okay: true
    ```

    This defaults to False
    """
    config = get_config() or {}
    env = self.workflow.args.environment
    # the --dirty flag wins; otherwise consult options.<env> in the
    # config, defaulting to the instance-level setting
    env_options = config.get('options', {}).get(env, {})
    config_okay = env_options.get(
        'dirty_working_copy_okay', self.dirty_working_copy_okay
    )
    return self.workflow.args.dirty or config_okay
def get_backend(self, remote=None):
    """Resolve the backend to use for the given remote.

    Precedence: the remote's backend in the project config, then the
    remote's default backend from the global config, then "local".
    Returns the instantiated backend bound to ``self.workflow``.
    """
    remote = remote or self.workflow.args.remote
    project_config = get_config(self.workflow)

    backend_name = "local"
    if remote is not None:
        from_project = (
            project_config.get("remotes", {}).get(remote, {}).get("backend")
        )
        from_default = self._get_default_backend(remote)
        # project config wins over the remote's default; fall back to
        # the hard-coded "local" when neither is set
        backend_name = from_project or from_default or backend_name

    backend = get_backend(backend_name, workflow=self.workflow)
    self.logger.debug(f"backend_name={backend_name}, backend={backend}")

    return backend
def handle(self):
    """Run the compose-flow subcommand configured for the named task.

    Looks up ``self.args.name`` under the config's ``tasks`` section,
    splits that task's ``command`` string, and dispatches to the
    matching subcommand with the remaining arguments.

    Returns:
        str: the string form of the subcommand that was run.

    Raises:
        CommandError: when the task name is not found in the config.
        NotImplementedError: when the task's command is not a
            compose-flow invocation.
    """
    config = get_config()

    task_name = self.args.name
    try:
        task = config['tasks'][task_name]
    except KeyError:
        # bug fix: this message was missing the f-prefix, so the
        # literal "{task_name}" was shown instead of the actual name
        raise CommandError(f'task name={task_name} not found')

    command = task['command']
    command_split = shlex.split(command)
    if command_split[0] != 'compose-flow':
        raise NotImplementedError(
            'tasks that are not compose-flow are not yet supported')

    subcommand_name = command_split[1]
    subcommand = self.get_subcommand(subcommand_name)

    subcommand_args = command_split[2:]
    subcommand.run(subcommand_args)

    return f'{subcommand}'
def config(self):
    """Return the parsed project configuration."""
    return get_config()
def get_rke_deploy_command(self):
    """Render the RKE config for this profile and build the deploy command.

    Writes the rendered YAML to a profile-specific file and returns the
    ``rke up`` command line pointing at it.
    """
    raw = get_config()['rke']['config']
    target = f'compose-flow-{self.workflow.args.profile}-rke.yml'
    self.render_single_yaml(raw, target)

    return f'rke up --config {target}'
def config(self):
    """Return the parsed configuration for this workflow."""
    return get_config(self.workflow)
def get_rke_deploy_command(self):
    """Render this profile's RKE config and return the ``rke up`` command.

    The raw config is taken from the workflow's config under
    ``rke.config`` and rendered to a profile-specific YAML file.
    """
    raw = get_config(self.workflow)["rke"]["config"]
    target = f"compose-flow-{self.workflow.args.profile}-rke.yml"
    self.render_single_yaml(raw, target)

    return f"rke up --config {target}"