Example #1
def get_workflow_jobs_from_parser(workflow):
    config_parser = load_parser_with_caller(PinballConfig.PARSER,
                                            PinballConfig.get_parser_params(workflow),
                                            ParserCaller.UI)
    tokens = config_parser.get_workflow_tokens(workflow)
    jobs_data = []
    for token in tokens:
        name = Name.from_job_token_name(token.name)
        if name.job:
            assert name.workflow == workflow
            job = pickle.loads(token.data)
            jobs_data.append(JobData(workflow=workflow,
                                     instance=None,
                                     job=name.job,
                                     job_type=job.__class__.__name__,
                                     is_condition=job.IS_CONDITION,
                                     info=job.info(),
                                     inputs=job.inputs,
                                     outputs=job.outputs,
                                     emails=job.emails,
                                     max_attempts=job.max_attempts,
                                     retry_delay_sec=job.retry_delay_sec,
                                     warn_timeout_sec=job.warn_timeout_sec,
                                     abort_timeout_sec=job.abort_timeout_sec,
                                     priority=token.priority,
                                     status=Status.NEVER_RUN))
    return jobs_data
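
A minimal usage sketch for the function above; the workflow name is hypothetical, and it assumes JobData exposes its constructor arguments as attributes.

jobs = get_workflow_jobs_from_parser('example_workflow')
for job_data in jobs:
    # Every job comes back as NEVER_RUN because the data is taken from the
    # parsed workflow config, not from execution history.
    print('%s (%s) max_attempts=%d' % (job_data.job,
                                       job_data.job_type,
                                       job_data.max_attempts))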
Example #2
def get_workflow_jobs_from_parser(workflow, parser_caller):
    config_parser = load_parser_with_caller(PinballConfig.PARSER,
                                            PinballConfig.PARSER_PARAMS,
                                            parser_caller)
    tokens = config_parser.get_workflow_tokens(workflow)
    jobs_data = []
    for token in tokens:
        name = Name.from_job_token_name(token.name)
        if name.job:
            assert name.workflow == workflow
            job = pickle.loads(token.data)
            jobs_data.append(JobData(workflow=workflow,
                                     instance=None,
                                     job=name.job,
                                     job_type=job.__class__.__name__,
                                     is_condition=job.IS_CONDITION,
                                     info=job.info(),
                                     inputs=job.inputs,
                                     outputs=job.outputs,
                                     emails=job.emails,
                                     max_attempts=job.max_attempts,
                                     retry_delay_sec=job.retry_delay_sec,
                                     warn_timeout_sec=job.warn_timeout_sec,
                                     abort_timeout_sec=job.abort_timeout_sec,
                                     priority=token.priority,
                                     status=Status.NEVER_RUN))
    return jobs_data
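
A matching sketch for the variant above, which takes the caller explicitly; ParserCaller.UI is the value hard-coded in Example #1, and the workflow name is hypothetical.

jobs = get_workflow_jobs_from_parser('example_workflow', ParserCaller.UI)
for job_data in jobs:
    # The JobData fields are the same as in Example #1; this variant only
    # changes how the parser is loaded.
    print('%s: %s' % (job_data.job, job_data.status))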
Example #3
    def _read_tokens_from_parser_params(self, workflow):
        """Read archived job tokens from the PinballConfig.PARSER_PARAMS.
        """
        config_parser = load_parser_with_caller(PinballConfig.PARSER,
                                                PinballConfig.get_parser_params(workflow),
                                                ParserCaller.ANALYZER)
        tokens = config_parser.get_workflow_tokens(self._workflow)
        self._filter_job_tokens(tokens)
Example #4
    def _read_tokens_from_parser_params(self):
        """Read archived job tokens from the PinballConfig.PARSER_PARAMS.
        """
        config_parser = load_parser_with_caller(PinballConfig.PARSER,
                                                PinballConfig.PARSER_PARAMS,
                                                ParserCaller.ANALYZER)
        tokens = config_parser.get_workflow_tokens(self._workflow)
        self._filter_job_tokens(tokens)
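
Examples #3 and #4 delegate the actual filtering to self._filter_job_tokens, which is not shown on this page. Below is a hypothetical standalone version of that step, reconstructed from the name.job check in Examples #1 and #2; it is a sketch, not the real implementation.

def filter_job_tokens(tokens, workflow):
    # Keep only tokens whose names parse to a job of the given workflow,
    # mirroring the check used in Examples #1 and #2.
    job_tokens = []
    for token in tokens:
        name = Name.from_job_token_name(token.name)
        if name.job and name.workflow == workflow:
            job_tokens.append(token)
    return job_tokens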
Example #5
    def run(self, emailer, store):
        if not self._check_workflow_instances(emailer, self.workflow, store):
            LOG.warn('too many instances running for workflow %s',
                     self.workflow)
            return None

        config_parser = load_parser_with_caller(PinballConfig.PARSER,
                                                self.parser_params,
                                                ParserCaller.SCHEDULE)
        workflow_tokens = config_parser.get_workflow_tokens(self.workflow)
        if not workflow_tokens:
            LOG.error('workflow %s not found', self.workflow)
            return None
        result = ModifyRequest()
        result.updates = workflow_tokens
        assert result.updates
        token = result.updates[0]
        name = Name.from_job_token_name(token.name)
        if not name.instance:
            name = Name.from_event_token_name(token.name)
        LOG.info('exporting workflow %s instance %s.  Its tokens are under %s',
                 name.workflow, name.instance, name.get_instance_prefix())
        return result
Example #6
    def run(self, emailer, store):
        if not self._check_workflow_instances(emailer, self.workflow, store):
            LOG.warn('too many instances running for workflow %s', self.workflow)
            return None

        config_parser = load_parser_with_caller(
            PinballConfig.PARSER,
            self.parser_params,
            ParserCaller.SCHEDULE
        )
        workflow_tokens = config_parser.get_workflow_tokens(self.workflow)
        if not workflow_tokens:
            LOG.error('workflow %s not found', self.workflow)
            return None
        result = ModifyRequest()
        result.updates = workflow_tokens
        assert result.updates
        token = result.updates[0]
        name = Name.from_job_token_name(token.name)
        if not name.instance:
            name = Name.from_event_token_name(token.name)
        LOG.info('exporting workflow %s instance %s.  Its tokens are under %s',
                 name.workflow, name.instance, name.get_instance_prefix())
        return result
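
A sketch of consuming the value returned by run() in Examples #5 and #6; the schedule, emailer, and store objects are hypothetical placeholders for whatever the surrounding scheduler provides.

request = schedule.run(emailer, store)
if request is None:
    # Either too many instances were already running or the workflow was not
    # found; both cases are logged inside run().
    pass
else:
    # request is the ModifyRequest built above; updates holds the tokens of
    # the exported workflow instance.
    print('posting %d workflow tokens' % len(request.updates))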