def get_workflow_jobs_from_parser(workflow):
    """Build JobData records for every job defined in *workflow*'s config.

    Loads the configured parser (as the UI caller), pulls all tokens for
    the workflow, and converts each job token into a JobData entry with
    status NEVER_RUN.

    Args:
        workflow: Name of the workflow whose jobs should be listed.
    Returns:
        List of JobData, one per job token, in token order.
    """
    parser = load_parser_with_caller(
        PinballConfig.PARSER,
        PinballConfig.get_parser_params(workflow),
        ParserCaller.UI)
    result = []
    for token in parser.get_workflow_tokens(workflow):
        token_name = Name.from_job_token_name(token.name)
        # Skip tokens that do not describe a job (e.g. workflow-level tokens).
        if not token_name.job:
            continue
        assert token_name.workflow == workflow
        # NOTE(review): token.data is unpickled here; assumes the payload was
        # produced locally by the parser, not untrusted input — confirm.
        job = pickle.loads(token.data)
        result.append(JobData(workflow=workflow,
                              instance=None,
                              job=token_name.job,
                              job_type=job.__class__.__name__,
                              is_condition=job.IS_CONDITION,
                              info=job.info(),
                              inputs=job.inputs,
                              outputs=job.outputs,
                              emails=job.emails,
                              max_attempts=job.max_attempts,
                              retry_delay_sec=job.retry_delay_sec,
                              warn_timeout_sec=job.warn_timeout_sec,
                              abort_timeout_sec=job.abort_timeout_sec,
                              priority=token.priority,
                              status=Status.NEVER_RUN))
    return result
def _read_tokens_from_parser_params(self, workflow):
    """Read archived job tokens via the parser from PinballConfig.PARSER_PARAMS.

    Instantiates the configured parser (as the ANALYZER caller) with params
    derived from *workflow*, fetches tokens for ``self._workflow``, and feeds
    them through ``self._filter_job_tokens``.

    NOTE(review): parser params are built from the *workflow* argument while
    tokens are fetched for ``self._workflow`` — presumably these are the same
    value; confirm against callers.
    """
    parser = load_parser_with_caller(
        PinballConfig.PARSER,
        PinballConfig.get_parser_params(workflow),
        ParserCaller.ANALYZER)
    workflow_tokens = parser.get_workflow_tokens(self._workflow)
    self._filter_job_tokens(workflow_tokens)
def __init__(
        self,
        next_run_time=None,
        recurrence_seconds=None,
        overrun_policy=OverrunPolicy.SKIP,
        parser_params=None,
        workflow=None,
        emails=None,
        max_running_instances=None):
    """Initialize a schedule for a workflow.

    Args:
        next_run_time: Epoch time of the next run; forwarded to Schedule.
        recurrence_seconds: Interval between runs; forwarded to Schedule.
        overrun_policy: Policy applied when a previous run is still active.
        parser_params: Parser parameters; derived from PinballConfig for the
            given workflow when None.
        workflow: Name of the workflow being scheduled.
        emails: Notification recipients; defaults to an empty list.
        max_running_instances: Cap on concurrently running instances; falls
            back to PinballConfig.DEFAULT_MAX_WORKFLOW_RUNNING_INSTANCES
            when not set.
    """
    Schedule.__init__(self, next_run_time, recurrence_seconds, overrun_policy)
    params = (PinballConfig.get_parser_params(workflow)
              if parser_params is None else parser_params)
    self.parser_params = annotate_parser_caller(params, ParserCaller.SCHEDULE)
    self.workflow = workflow
    self.emails = [] if emails is None else emails
    # NOTE(review): any falsy value (e.g. 0), not just None, selects the
    # default here — behavior preserved from the original; confirm intended.
    if max_running_instances:
        self.max_running_instances = max_running_instances
    else:
        self.max_running_instances = \
            PinballConfig.DEFAULT_MAX_WORKFLOW_RUNNING_INSTANCES