def doc_task(task_name, task_config, project_config=None, org_config=None):
    """Document a (project specific) task configuration in RST format.

    Renders the task name as an RST section title, followed by its
    description, class path, any class-level ``task_docs``, and a bulleted
    list of options (marking required ones and project-configured defaults).
    Returns the assembled document as a single string.
    """
    from cumulusci.core.utils import import_global

    doc = []
    doc.append("{}\n==========================================\n".format(task_name))
    doc.append("**Description:** {}\n".format(task_config.description))
    # BUGFIX: was "**Class::**" (double colon), which left a stray colon in
    # the rendered output; single colon matches the other field labels.
    doc.append("**Class:** {}\n".format(task_config.class_path))

    task_class = import_global(task_config.class_path)
    # Only use task_docs defined directly on this class (not inherited),
    # so subclasses don't repeat their parent's documentation.
    if "task_docs" in task_class.__dict__:
        task_docs = textwrap.dedent(task_class.task_docs.strip("\n"))
        doc.append(task_docs + "\n")

    if task_class.task_options:
        doc.append("Options:\n------------------------------------------\n")
        defaults = task_config.options or {}
        for name, option in list(task_class.task_options.items()):
            default = defaults.get(name)
            # Show the project-configured default, if any, after the description.
            if default:
                default = " **Default: {}**".format(default)
            else:
                default = ""
            if option.get("required"):
                doc.append(
                    "* **{}** *(required)*: {}{}".format(
                        name, option.get("description"), default
                    )
                )
            else:
                doc.append(
                    "* **{}**: {}{}".format(name, option.get("description"), default)
                )
    return "\n".join(doc)
def doc_task(task_name, task_config, project_config=None, org_config=None):
    """Document a (project specific) task configuration in RST format."""
    from cumulusci.core.utils import import_global

    # Header: title, description, and implementing class.
    sections = [
        f"**{task_name}**\n==========================================\n",
        f"**Description:** {task_config.description}\n",
        f"**Class:** {task_config.class_path}\n",
    ]

    task_class = import_global(task_config.class_path)
    # Only task_docs defined directly on the class (not inherited) are used.
    if "task_docs" in task_class.__dict__:
        sections.append(textwrap.dedent(task_class.task_docs.strip("\n")))

    task_option_info = get_task_option_info(task_config, task_class)

    sections.append("Command Syntax\n------------------------------------------\n")
    sections.append(get_command_syntax(task_name))

    option_lines = create_task_options_doc(task_option_info)
    if option_lines:
        sections.append("Options\n------------------------------------------\n")
        sections.extend(option_lines)

    return "\n".join(sections)
def _run_task(self):
    """Generate data via a configurable generation task, then load it.

    Runs the configured ``data_generation_task`` against a database
    (a temporary SQLite file unless ``database_url`` is supplied), then
    hands the same mapping file and database URL to a ``LoadData`` subtask.
    """
    mapping_file = os.path.abspath(self.options["mapping"])
    assert os.path.exists(mapping_file), f"{mapping_file} cannot be found."

    database_url = self.options.get("database_url")
    with temporary_dir() as tempdir:
        if not database_url:
            # No explicit database: generate into a throwaway SQLite file
            # that lives only for the duration of this task.
            sqlite_path = os.path.join(tempdir, "generated_data.db")
            # BUGFIX: was `f"sqlite:///" + sqlite_path` -- an f-string with
            # no placeholders concatenated by hand; same value, real f-string.
            database_url = f"sqlite:///{sqlite_path}"

        subtask_options = {
            **self.options,
            "mapping": mapping_file,
            "database_url": database_url,
        }

        class_path = self.options.get("data_generation_task")
        if not class_path:
            # NOTE(review): this default looks truncated -- "cumulusci.tasks."
            # is not an importable class path and import_global will fail on
            # it. Confirm the intended default class; left unchanged here to
            # preserve existing behavior.
            class_path = "cumulusci.tasks."
        task_class = import_global(class_path)
        task_config = TaskConfig({"options": subtask_options})
        data_gen_task = task_class(
            self.project_config, task_config, org_config=self.org_config
        )
        data_gen_task()

        # Load the generated rows into the org.
        subtask_config = TaskConfig({"options": subtask_options})
        subtask = LoadData(
            project_config=self.project_config,
            task_config=subtask_config,
            org_config=self.org_config,
            flow=self.flow,
            name=self.name,
            stepnum=self.stepnum,
        )
        subtask()
def _init_options(self, kwargs):
    """Validate and normalize task options.

    Raises:
        TaskOptionsError: for a mapping file that does not exist, a
            non-positive batch size, a missing data generation task, or a
            non-empty target database without ``replace_database``.
    """
    super()._init_options(kwargs)

    mapping_file = self.options.get("mapping")
    if mapping_file:
        self.mapping_file = os.path.abspath(mapping_file)
        if not os.path.exists(self.mapping_file):
            raise TaskOptionsError(f"{self.mapping_file} cannot be found.")
    else:
        self.mapping_file = None

    # BUGFIX: database_url was previously assigned twice (again after
    # debug_dir); the redundant second assignment is removed -- nothing
    # read or wrote it in between.
    self.database_url = self.options.get("database_url")
    self.num_records = int(self.options["num_records"])
    # Batch size defaults to "everything in a single batch".
    self.batch_size = int(self.options.get("batch_size", self.num_records))
    if self.batch_size <= 0:
        raise TaskOptionsError("Batch size should be greater than zero")

    class_path = self.options.get("data_generation_task")
    if class_path:
        self.data_generation_task = import_global(class_path)
    else:
        raise TaskOptionsError("No data generation task specified")

    self.debug_dir = self.options.get("debug_dir")

    if self.database_url:
        # Refuse to clobber an existing, populated database unless the
        # caller explicitly opted in with `replace_database`.
        engine, metadata = self._setup_engine(self.database_url)
        tables = metadata.tables
        if len(list(tables)) and not self.options.get("replace_database"):
            raise TaskOptionsError(
                f"Database {self.database_url} has tables "
                f"({list(tables)}) "
                "but `replace_database` was not specified"
            )
def task_run(config, task_name, org, o, debug, debug_before, debug_after, no_prompt):
    """Run a single named task against an org.

    CLI entry point: resolves the task config, validates any ``-o name
    value`` overrides against the task class's declared options, runs the
    task (optionally dropping into pdb before/after), and maps expected
    failures onto click exceptions so the CLI prints them cleanly.
    """
    # Get necessary configs
    org, org_config = config.get_org(org, fail_if_missing=False)
    task_config = getattr(config.project_config, "tasks__{}".format(task_name))
    if not task_config:
        raise TaskNotFoundError("Task not found: {}".format(task_name))

    # Get the class to look up options
    class_path = task_config.get("class_path")
    task_class = import_global(class_path)

    # Parse command line options and add to task config
    if o:
        if "options" not in task_config:
            task_config["options"] = {}
        for name, value in o:
            # Validate the option
            if name not in task_class.task_options:
                raise click.UsageError(
                    'Option "{}" is not available for task {}'.format(
                        name, task_name
                    )
                )

            # Override the option in the task config
            task_config["options"][name] = value

    task_config = TaskConfig(task_config)

    # Create and run the task
    try:
        task = task_class(config.project_config, task_config, org_config=org_config)

        if debug_before:
            import pdb

            pdb.set_trace()

        task()

        if debug_after:
            import pdb

            pdb.set_trace()

    except CumulusCIUsageError as e:
        # Usage error; report with usage line and no traceback
        exception = click.UsageError(str(e))
        handle_exception_debug(config, debug, throw_exception=exception)
    except (CumulusCIFailure, ScratchOrgException) as e:
        # Expected failure; report without traceback
        exception = click.ClickException(str(e) or e.__class__.__name__)
        handle_exception_debug(config, debug, throw_exception=exception)
    except Exception:
        # Unexpected exception; log to sentry and raise
        handle_exception_debug(config, debug, no_prompt=no_prompt)

    # Notify the user (terminal bell / desktop alert) that the task finished.
    config.alert("Task complete: {}".format(task_name))
def doc_task(task_name, task_config, project_config=None, org_config=None):
    """Document a (project specific) task configuration in RST format.

    Returns the assembled RST document as a single string.
    """
    from cumulusci.core.utils import import_global

    doc = []
    doc.append("{}\n==========================================\n".format(task_name))
    doc.append("**Description:** {}\n".format(task_config.description))
    # BUGFIX: was "**Class::**" (double colon), which left a stray colon in
    # the rendered output.
    doc.append("**Class:** {}\n".format(task_config.class_path))

    task_class = import_global(task_config.class_path)
    # BUGFIX: the unguarded `task_class.task_docs` access raised
    # AttributeError for classes without task_docs. Checking __dict__
    # (as the sibling doc_task variants do) also avoids repeating docs
    # inherited from a parent class.
    if "task_docs" in task_class.__dict__:
        task_docs = textwrap.dedent(task_class.task_docs.strip("\n"))
        if task_docs:
            doc.append(task_docs + "\n")

    if task_class.task_options:
        doc.append("Options:\n------------------------------------------\n")
        defaults = task_config.options or {}
        for name, option in list(task_class.task_options.items()):
            default = defaults.get(name)
            # Show the project-configured default, if any, after the description.
            if default:
                default = " **Default: {}**".format(default)
            else:
                default = ""
            if option.get("required"):
                doc.append(
                    "* **{}** *(required)*: {}{}".format(
                        name, option.get("description"), default
                    )
                )
            else:
                doc.append(
                    "* **{}**: {}{}".format(name, option.get("description"), default)
                )
    return "\n".join(doc)
def get_keychain_class(self):
    """Resolve the keychain class to instantiate.

    The CUMULUSCI_KEYCHAIN_CLASS environment variable overrides the class
    path configured in the project (or global) config.
    """
    if self.is_global_keychain:
        default_keychain_class = self.global_config.cumulusci__keychain
    else:
        default_keychain_class = self.project_config.cumulusci__keychain
    keychain_class = os.environ.get(
        "CUMULUSCI_KEYCHAIN_CLASS", default_keychain_class
    )
    return import_global(keychain_class)
def callback(*args, **kwargs):
    """Persist a configured service into the keychain.

    Click callback closure (captures `config`, `service_config`, and
    `name` from the enclosing scope): determines project scope, drops
    unset options, runs the service's optional validator, then stores
    the ServiceConfig and echoes a confirmation.
    """
    if config.is_global_keychain:
        # Global keychain: services can only be configured globally.
        project = False
    else:
        project = kwargs.pop("project", False)
    serv_conf = dict(
        (k, v) for k, v in list(kwargs.items()) if v is not None
    )  # remove None values

    # A service can define a callable to validate the service config
    validator_path = service_config.get("validator")
    if validator_path:
        validator = import_global(validator_path)
        try:
            validator(serv_conf)
        except Exception as e:
            # Surface validator failures as CLI usage errors.
            raise click.UsageError(str(e))

    config.keychain.set_service(name, ServiceConfig(serv_conf), project)
    if project:
        click.echo("{0} is now configured for this project.".format(name))
    else:
        click.echo(
            "{0} is now configured for all CumulusCI projects.".format(name)
        )
def get_keychain_class(self):
    """Resolve the keychain class, if one is configured.

    The CUMULUSCI_KEYCHAIN_CLASS environment variable takes precedence
    over the project (or universal) config. Returns None when no class
    path is configured anywhere.
    """
    if self.project_config is not None:
        configured = self.project_config.cumulusci__keychain
    else:
        configured = self.universal_config.cumulusci__keychain
    keychain_class = os.environ.get("CUMULUSCI_KEYCHAIN_CLASS", configured)
    if not keychain_class:
        return None
    return import_global(keychain_class)
def _init_parsers(self):
    """Invoked from Super Class"""
    # Build one parser per configured entry.
    for entry in self.parser_config:
        cls = import_global(entry["class_path"])
        self.parsers.append(cls(self, entry["title"]))
    # Additional parser to collect developer notes above tracked headers
    notes_parser = GithubLinesParser(self, title=None)
    notes_parser._in_section = True
    self.parsers.append(notes_parser)
def get_keychain_class(self):
    """Return the keychain class to instantiate.

    CUMULUSCI_KEYCHAIN_CLASS in the environment wins; otherwise the
    class path comes from the global or project config.
    """
    class_path = os.environ.get("CUMULUSCI_KEYCHAIN_CLASS")
    if class_path is None:
        # No environment override: fall back to the configured class path.
        source = (
            self.global_config if self.is_global_keychain else self.project_config
        )
        class_path = source.cumulusci__keychain
    return import_global(class_path)
def _init_options(self, kwargs):
    """Validate task options and resolve the data generation task class.

    Raises TaskOptionsError for a missing mapping file, a non-positive
    batch size, or a batch size combined with an explicit database_url.
    """
    super()._init_options(kwargs)

    self.mapping_file = os.path.abspath(self.options["mapping"])
    if not os.path.exists(self.mapping_file):
        raise TaskOptionsError(f"{self.mapping_file} cannot be found.")

    self.database_url = self.options.get("database_url")
    self.num_records = int(self.options["num_records"])
    # Default batch size: everything in one batch.
    self.batch_size = int(self.options.get("batch_size", self.num_records))
    if self.batch_size <= 0:
        raise TaskOptionsError("Batch size should be greater than zero")

    self.class_path = self.options.get("data_generation_task")
    self.data_generation_task = import_global(self.class_path)

    # Batching only makes sense against the internally managed database.
    if self.database_url and self.batch_size != self.num_records:
        raise TaskOptionsError(
            "You may not specify both `database_url` and `batch_size` options."
        )
def __call__(self, **options):
    """Run the named task, memoized on (task name, sorted options).

    A repeat call with identical options returns the cached result's
    return_values without re-running the task; otherwise the task is run
    through TaskRunner with pre/post callbacks and the result cached.
    """
    key = (self.task_name, tuple(sorted(options.items())))
    if key in self.cache.results:
        return self.cache.results[key].return_values

    flow = self.cache.flow
    task_config = flow.project_config.tasks[self.task_name]
    task_class = import_global(task_config["class_path"])
    step = StepSpec(1, self.task_name, task_config, task_class)

    flow.callbacks.pre_task(step)
    runner = TaskRunner(flow.project_config, step, flow.org_config, flow)
    result = runner.run_step(**options)
    flow.callbacks.post_task(step, result)

    self.cache.results[key] = result
    return result.return_values
def task_run(runtime, task_name, org, o, debug, debug_before, debug_after, no_prompt):
    """Run a single task by name against an org.

    CLI entry point: resolves the task config, validates any ``-o name
    value`` overrides against the task class's declared options, runs the
    task (optionally dropping into pdb before/after), and always fires a
    completion alert.
    """
    # Get necessary configs
    org, org_config = runtime.get_org(org, fail_if_missing=False)
    task_config = runtime.project_config.get_task(task_name)

    # Get the class to look up options
    class_path = task_config.class_path
    task_class = import_global(class_path)

    # Parse command line options and add to task config
    if o:
        if "options" not in task_config.config:
            task_config.config["options"] = {}
        for name, value in o:
            # Validate the option
            if name not in task_class.task_options:
                raise click.UsageError(
                    f'Option "{name}" is not available for task {task_name}'
                )

            # Override the option in the task config
            task_config.config["options"][name] = value

    # Create and run the task
    try:
        task = task_class(
            task_config.project_config, task_config, org_config=org_config
        )

        if debug_before:
            import pdb

            pdb.set_trace()

        task()

        if debug_after:
            import pdb

            pdb.set_trace()
    finally:
        # NOTE: the alert fires even when the task raises (finally block),
        # so the user is notified the run finished regardless of outcome.
        runtime.alert(f"Task complete: {task_name}")
def callback(*args, **kwargs):
    """Persist a configured service into the keychain.

    Click callback closure (captures `runtime`, `service_config`, and
    `name` from the enclosing scope): determines project scope, drops
    unset options, runs the service's optional validator, then stores
    the ServiceConfig and echoes a confirmation.
    """
    if runtime.project_config is None:
        # No project context: the service can only be configured globally.
        project = False
    else:
        project = kwargs.pop("project", False)
    serv_conf = dict(
        (k, v) for k, v in list(kwargs.items()) if v is not None
    )  # remove None values

    # A service can define a callable to validate the service config
    validator_path = service_config.get("validator")
    if validator_path:
        validator = import_global(validator_path)
        validator(serv_conf)

    runtime.keychain.set_service(name, ServiceConfig(serv_conf), project)
    if project:
        click.echo(f"{name} is now configured for this project.")
    else:
        click.echo(f"{name} is now configured for all CumulusCI projects.")
def _init_task(self, class_path, options, task_config):
    """Resolve the task class and merge CLI options into its config.

    Returns a (task_class, task_config) tuple.
    """
    task_class = import_global(class_path)
    merged_config = self._parse_task_options(options, task_class, task_config)
    return task_class, merged_config
import json import os from cumulusci.core.config import ConnectedAppOAuthConfig from cumulusci.core.config import OrgConfig from cumulusci.core.config import ScratchOrgConfig from cumulusci.core.config import ServiceConfig from cumulusci.core.keychain import BaseProjectKeychain from cumulusci.core.utils import import_global scratch_org_class = os.environ.get("CUMULUSCI_SCRATCH_ORG_CLASS") if scratch_org_class: scratch_org_factory = import_global(scratch_org_class) else: scratch_org_factory = ScratchOrgConfig class EnvironmentProjectKeychain(BaseProjectKeychain): """ A project keychain that stores org credentials in environment variables """ encrypted = False org_var_prefix = "CUMULUSCI_ORG_" app_var = "CUMULUSCI_CONNECTED_APP" service_var_prefix = "CUMULUSCI_SERVICE_" def _get_env(self): """ loads the environment variables as unicode if ascii """ env = {} for k, v in os.environ.items(): k = k.decode() if isinstance(k, bytes) else k v = v.decode() if isinstance(v, bytes) else v
def _init_parsers(self): for cfg in self.parser_config: if cfg["class_path"] is None: continue parser_class = import_global(cfg["class_path"]) self.parsers.append(parser_class(self, cfg["title"]))
def get_autoscaler(app_name):
    """Fetches the appropriate autoscaler given the app name"""
    autoscaler_cls = import_global(settings.METACI_WORKER_AUTOSCALER)
    app_settings = settings.AUTOSCALERS[app_name]
    return autoscaler_cls(app_settings)
def _visit_step(
    self,
    number,
    step_config,
    project_config,
    visited_steps=None,
    parent_options=None,
    parent_ui_options=None,
    from_flow=None,
):
    """
    for each step (as defined in the flow YAML), _visit_step is called
    with only the first two parameters. this takes care of validating the
    step, collating the option overrides, and if it is a task, creating a
    StepSpec for it. If it is a flow, we recursively call _visit_step
    with the rest of the parameters of context.

    :param number: StepVersion representation of the current step number
    :param step_config: the current step's config (dict from YAML)
    :param visited_steps: used when called recursively for nested steps,
        becomes the return value
    :param parent_options: used when called recursively for nested steps,
        options from parent flow
    :param parent_ui_options: used when called recursively for nested steps,
        UI options from parent flow
    :param from_flow: used when called recursively for nested steps,
        name of parent flow
    :return: List[StepSpec] a list of all resolved steps including/under
        the one passed in
    """
    number = StepVersion(str(number))
    # Fresh mutable defaults per call (never mutable default arguments).
    if visited_steps is None:
        visited_steps = []
    if parent_options is None:
        parent_options = {}
    if parent_ui_options is None:
        parent_ui_options = {}

    # Step Validation
    # - A step is either a task OR a flow.
    if all(k in step_config for k in ("flow", "task")):
        raise FlowConfigError(
            f"Step {number} is configured as both a flow AND a task. \n\t{step_config}."
        )

    # Skips
    # - either in YAML (with the None string)
    # - or by providing a skip list to the FlowRunner at initialization.
    if (
        ("flow" in step_config and step_config["flow"] == "None")
        or ("task" in step_config and step_config["task"] == "None")
        or ("task" in step_config and step_config["task"] in self.skip)
    ):
        # Record the skipped step so the flow output still shows it.
        visited_steps.append(
            StepSpec(
                step_num=number,
                task_name=step_config.get("task", step_config.get("flow")),
                task_config=step_config.get("options", {}),
                task_class=None,
                project_config=project_config,
                from_flow=from_flow,
                skip=True,  # someday we could use different vals for why skipped
            )
        )
        return visited_steps

    if "task" in step_config:
        name = step_config["task"]

        # get the base task_config from the project config, as a dict for
        # easier manipulation. will raise if the task doesn't exist / is invalid
        task_config = project_config.get_task(name)
        task_config_dict = copy.deepcopy(task_config.config)
        if "options" not in task_config_dict:
            task_config_dict["options"] = {}

        # merge the options together, from task_config all the way down
        # through parent_options
        step_overrides = copy.deepcopy(parent_options.get(name, {}))
        step_overrides.update(step_config.get("options", {}))
        task_config_dict["options"].update(step_overrides)

        # merge UI options from task config and parent flow
        if "ui_options" not in task_config_dict:
            task_config_dict["ui_options"] = {}
        step_ui_overrides = copy.deepcopy(parent_ui_options.get(name, {}))
        step_ui_overrides.update(step_config.get("ui_options", {}))
        task_config_dict["ui_options"].update(step_ui_overrides)

        # merge checks from task config and flow step
        if "checks" not in task_config_dict:
            task_config_dict["checks"] = []
        task_config_dict["checks"].extend(step_config.get("checks", []))

        # merge runtime options (highest precedence)
        if name in self.runtime_options:
            task_config_dict["options"].update(self.runtime_options[name])

        # get implementation class. raise/fail if it doesn't exist,
        # because why continue
        try:
            task_class = import_global(task_config_dict["class_path"])
        except (ImportError, AttributeError):
            raise FlowConfigError(f"Task named {name} has bad classpath")

        visited_steps.append(
            StepSpec(
                step_num=number,
                task_name=name,
                task_config=task_config_dict,
                task_class=task_class,
                project_config=task_config.project_config,
                allow_failure=step_config.get("ignore_failure", False),
                from_flow=from_flow,
                when=step_config.get("when"),
            )
        )
        return visited_steps

    if "flow" in step_config:
        name = step_config["flow"]
        # Dotted path records the chain of nested flows for this step.
        if from_flow:
            path = ".".join([from_flow, name])
        else:
            path = name
        step_options = step_config.get("options", {})
        step_ui_options = step_config.get("ui_options", {})
        flow_config = project_config.get_flow(name)
        for sub_number, sub_stepconf in flow_config.steps.items():
            # append the flow number to the child number, since its a LooseVersion.
            # e.g. if we're in step 2.3 which references a flow with steps 1-5, it
            # simply ends up as five steps: 2.3.1, 2.3.2, 2.3.3, 2.3.4, 2.3.5
            # TODO: how does this work with nested flowveride? what does
            # defining step 2.3.2 later do?
            num = f"{number}/{sub_number}"
            self._visit_step(
                number=num,
                step_config=sub_stepconf,
                project_config=flow_config.project_config,
                visited_steps=visited_steps,
                parent_options=step_options,
                parent_ui_options=step_ui_options,
                from_flow=path,
            )

    return visited_steps
def _init_parsers(self): for cfg in self.parser_config: parser_class = import_global(cfg["class_path"]) self.parsers.append(parser_class(self, cfg["title"]))