Example 1
0
 def get_event_names(self, workflow_name, instance, job, input_name):
     """Return names of events under a workflow instance, job, and input.

     Groups event tokens by name prefix via the token master client and
     extracts the event component from each returned token name.

     Args:
         workflow_name: Name of the workflow.
         instance: Workflow instance id.
         job: Name of the job owning the input.
         input_name: Name of the job input to look under.
     Returns:
         List of event names found under the input prefix; empty list if
         the group response contains no counts.
     """
     name = Name()
     name.workflow = workflow_name
     name.instance = instance
     name.job = job
     name.input = input_name
     request = GroupRequest()
     request.namePrefix = name.get_input_prefix()
     request.groupSuffix = Name.DELIMITER
     response = self._client.group(request)
     if not response.counts:
         return []
     # Iterate the dict directly (.keys() is redundant) and avoid
     # rebinding the outer `name` variable inside the loop.
     return [Name.from_event_token_name(token_name).event
             for token_name in response.counts]
Example 2
0
 def get_event_names(self, workflow_name, instance, job, input_name):
     """List event names stored under a workflow instance, job, and input.

     Builds an input-prefix group query, sends it to the token master
     client, and parses each grouped token name back into its event part.

     Args:
         workflow_name: Name of the workflow.
         instance: Workflow instance id.
         job: Name of the job.
         input_name: Name of the input under that job.
     Returns:
         List of event names; empty when no counts come back.
     """
     prefix = Name()
     prefix.workflow = workflow_name
     prefix.instance = instance
     prefix.job = job
     prefix.input = input_name
     request = GroupRequest()
     request.namePrefix = prefix.get_input_prefix()
     request.groupSuffix = Name.DELIMITER
     response = self._client.group(request)
     result = []
     if response.counts:
         for token_name in response.counts.keys():
             parsed = Name.from_event_token_name(token_name)
             result.append(parsed.event)
     return result
Example 3
0
    def _generate_missing_events(self, job_names):
        """Generate external events required to run all jobs in a set.

        For a set of jobs (a subset of all jobs in the workflow), produce
        events satisfying upstream dependencies external to that set.  E.g.,
        for job dependency structure like this:

        A1  A2
         | /
        B1  B2
         |
        C1  C2
         | /
        D1

        and job_names = (C1, D1) we would generate events satisfying the
        following deps: B1->C1, C2->D1.

        Args:
            job_names: The set of job names whose external deps are to be
                satisfied by the generated events.
        """
        input_prefixes = set()
        for job_name in job_names:
            job = self._jobs[job_name]
            for job_input in job.inputs:
                # Inputs fed by jobs inside the set are satisfied at run
                # time; only external inputs need synthetic events.
                if job_input in job_names:
                    continue
                name = Name(workflow=self._workflow,
                            instance=self._instance,
                            job=job_name,
                            input_name=job_input,
                            # Counter suffix keeps generated names unique.
                            event='poison_%d' % len(input_prefixes))
                input_prefix = name.get_input_prefix()
                # One synthetic event per input prefix is sufficient.
                if input_prefix in input_prefixes:
                    continue
                input_prefixes.add(input_prefix)
                event_token_name = name.get_event_token_name()
                # Idiomatic `not in`, and reuse the token name already
                # computed instead of calling get_event_token_name() again.
                if event_token_name not in self._existing_events:
                    self._new_events[event_token_name] = Event('analyzer')
Example 4
0
    def _generate_missing_events(self, job_names):
        """Create events satisfying dependencies external to a job set.

        Given a subset of the workflow's jobs, emit synthetic events for
        every input fed from outside the subset.  E.g., for job dependency
        structure like this:

        A1  A2
         | /
        B1  B2
         |
        C1  C2
         | /
        D1

        and job_names = (C1, D1) we would generate events satisfying the
        following deps: B1->C1, C2->D1.

        Args:
            job_names: The set of job names whose external deps are to be
                satisfied by the generated events.
        """
        seen_prefixes = set()
        for current_job in job_names:
            for dependency in self._jobs[current_job].inputs:
                # Dependencies inside the set need no synthetic event.
                if dependency in job_names:
                    continue
                event_name = Name(workflow=self._workflow,
                                  instance=self._instance,
                                  job=current_job,
                                  input_name=dependency,
                                  event='poison_%d' % len(seen_prefixes))
                prefix = event_name.get_input_prefix()
                if prefix in seen_prefixes:
                    continue
                seen_prefixes.add(prefix)
                token_name = event_name.get_event_token_name()
                if token_name not in self._existing_events:
                    self._new_events[
                        event_name.get_event_token_name()] = Event(
                            'analyzer')
Example 5
0
    def _make_job_runnable(self, job_token):
        """Attempt to make a job runnable.

        Look up event tokens under each of the job's inputs.  When a
        combination of triggering events is present, consume them and move
        the job to the runnable state; otherwise leave the job untouched.

        Args:
            job_token: The job token to make runnable.
        Returns:
            True if there were no errors during communication with the
            master, otherwise False.
        """
        # NOTE(review): pickle on token data assumes tokens originate from
        # this system and are trusted — confirm no external writer exists.
        job = pickle.loads(job_token.data)
        token_name = Name.from_job_token_name(job_token.name)
        # TODO(pawel): handle jobs with no dependencies
        assert job.inputs
        request = QueryRequest(queries=[])
        for job_input in job.inputs:
            input_name = Name(workflow=token_name.workflow,
                              instance=token_name.instance,
                              job=token_name.job,
                              input_name=job_input)
            query = Query()
            query.namePrefix = input_name.get_input_prefix()
            query.maxTokens = 1
            request.queries.append(query)
        try:
            response = self._client.query(request)
        except TokenMasterException:
            # TODO(pawel): add a retry count and fail if a limit is reached.
            LOG.exception('error sending request %s', request)
            return False
        events = Worker._get_triggering_events(response.tokens)
        if not events:
            return True
        return self._move_job_token_to_runnable(job_token, events)
Example 6
0
    def _make_job_runnable(self, job_token):
        """Try to transition a job to the runnable state.

        Queries the master for event tokens under every input of the job.
        If a combination of triggering events exists, those events are
        removed and the job becomes runnable; otherwise nothing changes.

        Args:
            job_token: The job token to make runnable.
        Returns:
            True if there were no errors during communication with the
            master, otherwise False.
        """
        # NOTE(review): token data is presumably produced by this system,
        # so unpickling is assumed trusted — verify.
        job = pickle.loads(job_token.data)
        job_name = Name.from_job_token_name(job_token.name)
        request = QueryRequest(queries=[])
        # TODO(pawel): handle jobs with no dependencies
        assert job.inputs
        for an_input in job.inputs:
            input_name = Name()
            input_name.workflow = job_name.workflow
            input_name.instance = job_name.instance
            input_name.job = job_name.job
            input_name.input = an_input
            query = Query()
            query.namePrefix = input_name.get_input_prefix()
            # One token per input is enough to detect a triggering event.
            query.maxTokens = 1
            request.queries.append(query)
        try:
            response = self._client.query(request)
        except TokenMasterException:
            # TODO(pawel): add a retry count and fail if a limit is reached.
            LOG.exception('error sending request %s', request)
            return False
        triggering = Worker._get_triggering_events(response.tokens)
        if triggering:
            return self._move_job_token_to_runnable(job_token, triggering)
        return True