Exemplo n.º 1
0
def get_workflow_jobs_from_parser(workflow):
    """Collect JobData records for every job token in a workflow.

    The workflow is parsed with the globally configured parser; each
    job token is unpickled and wrapped in a JobData with status
    NEVER_RUN.

    Args:
        workflow: Name of the workflow whose jobs are collected.
    Returns:
        A list of JobData objects, one per job token.
    """
    parser = load_path(PinballConfig.PARSER)(PinballConfig.PARSER_PARAMS)
    result = []
    for token in parser.get_workflow_tokens(workflow):
        token_name = Name.from_job_token_name(token.name)
        if not token_name.job:
            continue
        assert token_name.workflow == workflow
        # NOTE(review): pickle.loads on stored token data — assumes the
        # token store is trusted.
        job = pickle.loads(token.data)
        result.append(JobData(workflow=workflow,
                              instance=None,
                              job=token_name.job,
                              job_type=type(job).__name__,
                              is_condition=job.IS_CONDITION,
                              info=job.info(),
                              inputs=job.inputs,
                              outputs=job.outputs,
                              emails=job.emails,
                              max_attempts=job.max_attempts,
                              retry_delay_sec=job.retry_delay_sec,
                              warn_timeout_sec=job.warn_timeout_sec,
                              abort_timeout_sec=job.abort_timeout_sec,
                              priority=token.priority,
                              status=Status.NEVER_RUN))
    return result
Exemplo n.º 2
0
 def _read_tokens_from_parser_params(self):
     """Read archived job tokens from the PinballConfig.PARSER_PARAMS.

     Instantiates the configured parser, fetches this workflow's
     tokens, and passes them to the job-token filter.
     """
     parser_factory = load_path(PinballConfig.PARSER)
     config_parser = parser_factory(PinballConfig.PARSER_PARAMS)
     workflow_tokens = config_parser.get_workflow_tokens(self._workflow)
     self._filter_job_tokens(workflow_tokens)
Exemplo n.º 3
0
def get_workflow_jobs_from_parser(workflow):
    """Return JobData entries for all jobs defined in *workflow*.

    Parses the workflow via the configured parser, unpickles every job
    token found, and converts it to a JobData record whose status is
    NEVER_RUN.

    Args:
        workflow: The workflow name whose job tokens are read.
    Returns:
        A list of JobData objects.
    """
    parser_factory = load_path(PinballConfig.PARSER)
    config_parser = parser_factory(PinballConfig.PARSER_PARAMS)

    def _to_job_data(token, job_name):
        # NOTE(review): token data comes from the parser/store and is
        # assumed trusted before being unpickled.
        job = pickle.loads(token.data)
        return JobData(workflow=workflow,
                       instance=None,
                       job=job_name,
                       job_type=job.__class__.__name__,
                       is_condition=job.IS_CONDITION,
                       info=job.info(),
                       inputs=job.inputs,
                       outputs=job.outputs,
                       emails=job.emails,
                       max_attempts=job.max_attempts,
                       retry_delay_sec=job.retry_delay_sec,
                       warn_timeout_sec=job.warn_timeout_sec,
                       abort_timeout_sec=job.abort_timeout_sec,
                       priority=token.priority,
                       status=Status.NEVER_RUN)

    jobs_data = []
    for token in config_parser.get_workflow_tokens(workflow):
        name = Name.from_job_token_name(token.name)
        if name.job:
            assert name.workflow == workflow
            jobs_data.append(_to_job_data(token, name.job))
    return jobs_data
    def _job_config_to_job(job_config, outputs):
        """Instantiate a Pinball job from its config.

        Args:
            job_config: The job config.
            outputs: The list of job output names.
        Returns:
            Pinball job constructed from the config.
        """
        # Resolve the template class from its dotted path, then build it.
        template_cls = load_path(job_config.template)
        job_template = template_cls(job_config.job,
                                    max_attempts=job_config.max_attempts,
                                    emails=job_config.emails,
                                    priority=job_config.priority)
        return job_template.get_pinball_job(job_config.parents, outputs,
                                            job_config.template_params)
    def _condition_config_to_condition(condition_config, outputs):
        """Instantiate a Pinball condition from its config.

        Args:
            condition_config: The condition config.
            outputs: The list of condition output names.
        Returns:
            Pinball condition constructed from the config.
        """
        # Resolve the template class from its dotted path, then build it.
        template_cls = load_path(condition_config.template)
        condition_template = template_cls(
            condition_config.job,
            max_attempts=condition_config.max_attempts,
            retry_delay_sec=condition_config.retry_delay_sec,
            emails=condition_config.emails,
            priority=condition_config.priority)
        return condition_template.get_pinball_condition(
            outputs, params=condition_config.template_params)
    def _job_config_to_job(job_config, outputs):
        """Build a Pinball job object from *job_config*.

        Args:
            job_config: The job config.
            outputs: The list of job output names.
        Returns:
            Pinball job constructed from the config.
        """
        # load template class by fully qualified name and instantiate it
        job_template = load_path(job_config.template)(
            job_config.job,
            max_attempts=job_config.max_attempts,
            emails=job_config.emails,
            priority=job_config.priority)
        parents = job_config.parents
        template_params = job_config.template_params
        return job_template.get_pinball_job(parents, outputs, template_params)
    def _condition_config_to_condition(condition_config, outputs):
        """Build a Pinball condition object from *condition_config*.

        Args:
            condition_config: The condition config.
            outputs: The list of condition output names.
        Returns:
            Pinball condition constructed from the config.
        """
        # load template class by fully qualified name and instantiate it
        condition_template = load_path(condition_config.template)(
            condition_config.job,
            max_attempts=condition_config.max_attempts,
            retry_delay_sec=condition_config.retry_delay_sec,
            emails=condition_config.emails,
            priority=condition_config.priority)
        template_params = condition_config.template_params
        return condition_template.get_pinball_condition(
            outputs, params=template_params)
Exemplo n.º 8
0
    def run(self, emailer, store):
        """Export the tokens of this command's workflow.

        Parses the workflow with the configured parser and packages the
        resulting tokens into a ModifyRequest.

        Args:
            emailer: Used when checking running workflow instances.
            store: Token store consulted for running instances.
        Returns:
            A ModifyRequest whose updates are the workflow tokens, or
            None if too many instances are running or the workflow was
            not found.
        """
        if not self._check_workflow_instances(emailer, self.workflow, store):
            # Logger.warn is a deprecated alias; use warning().
            LOG.warning('too many instances running for workflow %s',
                        self.workflow)
            return None

        config_parser = load_path(PinballConfig.PARSER)(self.parser_params)
        workflow_tokens = config_parser.get_workflow_tokens(self.workflow)
        if not workflow_tokens:
            LOG.error('workflow %s not found', self.workflow)
            return None
        result = ModifyRequest()
        result.updates = workflow_tokens
        assert result.updates
        token = result.updates[0]
        name = Name.from_job_token_name(token.name)
        if not name.instance:
            # The first token may be an event token rather than a job token.
            name = Name.from_event_token_name(token.name)
        LOG.info('exporting workflow %s instance %s.  Its tokens are under %s',
                 name.workflow, name.instance, name.get_instance_prefix())
        return result
Exemplo n.º 9
0
    def run(self, emailer, store):
        """Export the tokens of this command's workflow.

        Parses the workflow with the configured parser and packages the
        resulting tokens into a ModifyRequest.

        Args:
            emailer: Used when checking running workflow instances.
            store: Token store consulted for running instances.
        Returns:
            A ModifyRequest whose updates are the workflow tokens, or
            None if too many instances are running or the workflow was
            not found.
        """
        if not self._check_workflow_instances(emailer, self.workflow, store):
            # Logger.warn is a deprecated alias; use warning().
            LOG.warning('too many instances running for workflow %s',
                        self.workflow)
            return None

        config_parser = load_path(PinballConfig.PARSER)(self.parser_params)
        workflow_tokens = config_parser.get_workflow_tokens(self.workflow)
        if not workflow_tokens:
            LOG.error('workflow %s not found', self.workflow)
            return None
        result = ModifyRequest()
        result.updates = workflow_tokens
        assert result.updates
        token = result.updates[0]
        name = Name.from_job_token_name(token.name)
        if not name.instance:
            # The first token may be an event token rather than a job token.
            name = Name.from_event_token_name(token.name)
        LOG.info('exporting workflow %s instance %s.  Its tokens are under %s',
                 name.workflow, name.instance, name.get_instance_prefix())
        return result
Exemplo n.º 10
0
    def __init__(self, params):
        """Set up the PyWorkflowParser from its configuration dict.

        Args:
            params: A dict with the following keys:
                'workflows_config' (required): fully qualified name of
                    the workflows config object described above.
                'job_repo_dir' (optional): root dir of the repo where
                    all user jobs are stored.
                'job_import_dirs_config' (optional): fully qualified
                    name of the object that stores the list of dirs
                    (relative to 'job_repo_dir') where user jobs are
                    defined.
        """
        assert 'workflows_config' in params
        super(PyWorkflowParser, self).__init__(params)

        self.parser_params = params
        config_path = params['workflows_config']
        self.workflows_config_str = config_path
        # Resolve the workflows config object from its dotted path.
        self.workflows_config = load_path(config_path)
        self.workflows = {}
Exemplo n.º 11
0
    def __init__(self, params):
        """Initialize the parser from *params*.

        Args:
            params: Configuration dict containing:
                'workflows_config': fully qualified name of the
                    workflows config object described above (required).
                'job_repo_dir': root dir of the repo where all user
                    jobs are stored (optional).
                'job_import_dirs_config': fully qualified name of the
                    object listing the dirs, relative to
                    'job_repo_dir', where user jobs are defined
                    (optional).
        """
        assert 'workflows_config' in params
        super(PyWorkflowParser, self).__init__(params)

        self.parser_params = params
        self.workflows_config_str = params['workflows_config']
        # Load the workflows config object named by the dotted path.
        self.workflows_config = load_path(self.workflows_config_str)
        self.workflows = {}
Exemplo n.º 12
0
def load_parser_with_caller(parser_name, parser_params, parser_caller):
    """Instantiate a parser with its params annotated by the caller.

    Args:
        parser_name: Dotted path of the parser class to load.
        parser_params: Params passed to the parser constructor.
        parser_caller: Caller identifier annotated into the params.
    Returns:
        The constructed parser instance.
    """
    parser_cls = load_path(parser_name)
    annotated = annotate_parser_caller(parser_params, parser_caller)
    return parser_cls(annotated)
Exemplo n.º 13
0
def load_parser_with_caller(parser_name, parser_params, parser_caller):
    """Load a parser class by name and construct it.

    The parser params are first annotated with the caller identity.

    Args:
        parser_name: Dotted path of the parser class.
        parser_params: Params for the parser constructor.
        parser_caller: Identity of the calling component.
    Returns:
        The constructed parser instance.
    """
    return load_path(parser_name)(
        annotate_parser_caller(parser_params, parser_caller))
Exemplo n.º 14
0
 def _read_tokens_from_parser_params(self):
     """Read archived job tokens from the PinballConfig.PARSER_PARAMS.

     Builds the configured parser, reads this workflow's tokens, and
     filters the job tokens into self.
     """
     parser = load_path(PinballConfig.PARSER)(PinballConfig.PARSER_PARAMS)
     tokens = parser.get_workflow_tokens(self._workflow)
     self._filter_job_tokens(tokens)