Example 1
    def _get_instances_using_cache(self, workflow):
        """Get workflow instances, preferably from the cache.

        As a side effect, archived instances that do not exist in the cache
        will be added to the cache.

        Args:
            workflow: The name of the workflow whose instances we are
                interested in.
        Returns:
            List of instances for the given workflow.
        """
        name = Name(workflow=workflow)
        workflow_prefix = name.get_workflow_prefix()
        workflow_token_names = self._store.read_token_names(
            name_prefix=workflow_prefix)
        instances_prefixes = DataBuilder._get_instance_prefixes(
            workflow_token_names)
        result = []
        for prefix in instances_prefixes:
            name = Name.from_instance_prefix(prefix)
            assert name.workflow and name.instance, (
                'Expected instance prefix, found %s' % prefix)
            result.append(self.get_instance(name.workflow, name.instance))
        return result
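
The method above relies on Pinball's hierarchical token names. The following self-contained sketch models that layout with plain strings; the '/workflow/<workflow>/<instance>/...' scheme and the helper names are assumptions made for illustration, not the real Name or DataBuilder API.

def get_workflow_prefix(workflow):
    """Toy equivalent of Name(workflow=...).get_workflow_prefix()."""
    return '/workflow/%s/' % workflow

def get_instance_prefixes(token_names):
    """Toy equivalent of DataBuilder._get_instance_prefixes()."""
    prefixes = set()
    for token_name in token_names:
        parts = token_name.split('/')
        # parts == ['', 'workflow', <workflow>, <instance>, ...]
        if len(parts) > 4:
            prefixes.add('/'.join(parts[:4]) + '/')
    return prefixes

token_names = [
    '/workflow/example_wf/123/job/waiting/some_job',
    '/workflow/example_wf/123/input/some_job/some_event',
    '/workflow/example_wf/456/job/runnable/other_job',
]
print(sorted(get_instance_prefixes(token_names)))
# ['/workflow/example_wf/123/', '/workflow/example_wf/456/']
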
Example 2
    def _get_job_tokens(self, workflow=None, instance=None, job_state=None,
                        job=None):
        """Extract job tokens from the store.

        Args:
            workflow: The name of the workflow whose jobs we are interested in.
            instance: The name of the instance whose jobs we are interested in.
            job_state: The state of the jobs we are interested in.
            job: The name of the job we are interested in.
        Returns:
            List of jobs matching the specification.
        """
        name = Name(workflow=workflow, instance=instance, job_state=job_state,
                    job=job)
        if name.job:
            prefix = name.get_job_token_name()
        elif name.job_state:
            prefix = name.get_job_state_prefix()
        elif name.instance:
            prefix = name.get_job_prefix()
        elif name.workflow:
            prefix = name.get_workflow_prefix()
        else:
            prefix = ''
        tokens = self._store.read_tokens(name_prefix=prefix)
        result = []
        for token in tokens:
            token_name = Name.from_job_token_name(token.name)
            if token_name.get_job_token_name():
                # This is a job token.
                if not job or job == token_name.job:
                    # We matched the prefix, so if we are looking for a
                    # specific job, its name must match exactly.
                    result.append(token)
        return result
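
The if/elif cascade selects the narrowest name prefix implied by the supplied components before the store is queried, and the loop afterwards keeps only exact job-name matches. Below is a minimal sketch of the prefix cascade, again assuming a '/workflow/<workflow>/<instance>/job/<state>/<job>' layout rather than the real Name getters.

def job_token_prefix(workflow=None, instance=None, job_state=None, job=None):
    """Return the narrowest token-name prefix implied by the arguments."""
    if workflow and instance and job_state and job:
        return '/workflow/%s/%s/job/%s/%s' % (workflow, instance, job_state, job)
    if workflow and instance and job_state:
        return '/workflow/%s/%s/job/%s/' % (workflow, instance, job_state)
    if workflow and instance:
        return '/workflow/%s/%s/job/' % (workflow, instance)
    if workflow:
        return '/workflow/%s/' % workflow
    return ''

def read_token_names(store, name_prefix):
    """Toy stand-in for the store read: keep names starting with the prefix."""
    return [name for name in store if name.startswith(name_prefix)]

store = [
    '/workflow/wf/123/job/waiting/parent_job',
    '/workflow/wf/123/job/runnable/child_job',
    '/workflow/wf/123/input/child_job/some_event',
]
prefix = job_token_prefix(workflow='wf', instance='123', job_state='runnable')
print(read_token_names(store, prefix))
# ['/workflow/wf/123/job/runnable/child_job']
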
Example 3
    def _get_jobs(self, workflow, job):
        """Get job definitions from the store across all workflow instances.

        Args:
            workflow: The name of the workflow whose jobs we are interested in.
            job: The name of the job.
        Returns:
            List of matching job records across all instances of the workflow.
        """
        name = Name(workflow=workflow)
        name_prefix = name.get_workflow_prefix()
        # This is a bit hacky since we bypass the Name module where all the
        # token naming logic is supposed to be located.
        # TODO(pawel): extend the Name module to support abstractions needed
        # here.
        name_infix = '/job/'
        name_suffix = '/%s' % job
        job_tokens = self._store.read_tokens(name_prefix=name_prefix,
                                             name_infix=name_infix,
                                             name_suffix=name_suffix)
        result = []
        for job_token in job_tokens:
            job_record = pickle.loads(job_token.data)
            result.append(job_record)
        return result
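
The store call combines a prefix, an infix, and a suffix to pick out one job's definition across every instance of the workflow. The toy matcher below only illustrates that string matching; the real store query is a Pinball internal with its own signature.

def match_token_names(store, name_prefix='', name_infix='', name_suffix=''):
    """Keep names that start with the prefix, contain the infix, and end
    with the suffix -- a toy model of the store query used above."""
    return [name for name in store
            if name.startswith(name_prefix)
            and name_infix in name
            and name.endswith(name_suffix)]

store = [
    '/workflow/wf/123/job/waiting/build_job',
    '/workflow/wf/456/job/runnable/build_job',
    '/workflow/wf/456/job/runnable/test_job',
]
# All definitions of 'build_job' across the instances of workflow 'wf'.
print(match_token_names(store,
                        name_prefix='/workflow/wf/',
                        name_infix='/job/',
                        name_suffix='/build_job'))
# ['/workflow/wf/123/job/waiting/build_job',
#  '/workflow/wf/456/job/runnable/build_job']
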
Example 4
    def _read_tokens_from_client(self, client):
        """Read archived job tokens from the client.

        Args:
            client: The client to read tokens from.
        """
        name = Name(workflow=self._workflow, instance=self._instance)
        query = Query(namePrefix=name.get_workflow_prefix())
        request = QueryRequest(queries=[query])
        response = client.query(request)
        assert len(response.tokens) == 1
        tokens = response.tokens[0]
        self._filter_job_tokens(tokens)
        self._filter_event_tokens(tokens)
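
The round trip above issues a single-prefix query and expects exactly one result list in the response, one per query in the request. The fake classes below imitate just the fields this method touches; Query, QueryRequest and the master client are Thrift-generated Pinball types whose real definitions differ.

class FakeQuery(object):
    def __init__(self, namePrefix):
        self.namePrefix = namePrefix

class FakeQueryRequest(object):
    def __init__(self, queries):
        self.queries = queries

class FakeResponse(object):
    def __init__(self, tokens):
        # One list of matching token names per query in the request.
        self.tokens = tokens

class FakeClient(object):
    def __init__(self, token_names):
        self._token_names = token_names

    def query(self, request):
        tokens = []
        for query in request.queries:
            tokens.append([name for name in self._token_names
                           if name.startswith(query.namePrefix)])
        return FakeResponse(tokens)

client = FakeClient(['/workflow/wf/123/job/waiting/some_job',
                     '/workflow/other_wf/1/job/waiting/some_job'])
request = FakeQueryRequest(queries=[FakeQuery(namePrefix='/workflow/wf/')])
response = client.query(request)
assert len(response.tokens) == 1  # one result list per query in the request
print(response.tokens[0])
# ['/workflow/wf/123/job/waiting/some_job']
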
Example 5
    def get_workflow_instances(self, workflow_name):
        """Return list of instances of a given workflow."""
        request = GroupRequest()
        name = Name()
        name.workflow = workflow_name
        request.namePrefix = name.get_workflow_prefix()
        request.groupSuffix = Name.DELIMITER
        response = self._client.group(request)
        instance_names = []
        if response.counts:
            for prefix in response.counts.keys():
                name = Name.from_instance_prefix(prefix)
                if name.instance:
                    instance_names.append(name.instance)
        return instance_names
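
The group() call buckets token names sharing the workflow prefix by everything up to and including the next delimiter, which yields one key per instance prefix. Here is a simplified model of that grouping; the exact semantics of the real GroupRequest and its response are assumed for this sketch.

import collections

def group_token_names(token_names, name_prefix, group_suffix):
    """Count tokens per group, where a group is the name cut right after
    the first group_suffix that follows name_prefix."""
    counts = collections.defaultdict(int)
    for name in token_names:
        if not name.startswith(name_prefix):
            continue
        end = name.find(group_suffix, len(name_prefix))
        group_key = name if end < 0 else name[:end + len(group_suffix)]
        counts[group_key] += 1
    return dict(counts)

token_names = [
    '/workflow/wf/123/job/waiting/some_job',
    '/workflow/wf/123/input/some_job/some_event',
    '/workflow/wf/456/job/runnable/other_job',
]
print(group_token_names(token_names,
                        name_prefix='/workflow/wf/',
                        group_suffix='/'))
# {'/workflow/wf/123/': 2, '/workflow/wf/456/': 1}
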