Example 1
0
    def _get_output_event_tokens(self, job):
        """Create output event tokens for the owned job token.

        Args:
            job: The job whose outputs should be announced as events.
        Returns:
            A list of event tokens, one per output of the owned job token.
        """
        assert self._owned_job_token
        owner_name = Name.from_job_token_name(self._owned_job_token.name)
        # The event lands in the *input* slot of each downstream job, keyed
        # by the name of the job that produced it.
        target_name = Name()
        target_name.workflow = owner_name.workflow
        target_name.instance = owner_name.instance
        target_name.input = owner_name.job
        tokens = []
        for downstream_job in job.outputs:
            target_name.job = downstream_job
            target_name.event = get_unique_name()
            event = Event(creator=self._name)
            assert job.history
            event.attributes = job.history[-1].get_event_attributes()
            tokens.append(Token(name=target_name.get_event_token_name(),
                                data=pickle.dumps(event)))
        return tokens
Example 2
0
    def _get_output_event_tokens(self, job):
        """Create output event tokens for the owned job token.

        Args:
            job: The job whose outputs should be announced as events.
        Returns:
            A list of event tokens, one per output of the owned job token.
        """
        assert self._owned_job_token
        owner_name = Name.from_job_token_name(self._owned_job_token.name)
        # Each event is addressed to a downstream job's input slot named
        # after the producing job.
        target_name = Name()
        target_name.workflow = owner_name.workflow
        target_name.instance = owner_name.instance
        target_name.input = owner_name.job
        tokens = []
        for downstream_job in job.outputs:
            target_name.job = downstream_job
            target_name.event = get_unique_name()
            event = Event(creator=self._name)
            assert job.history
            event.attributes = job.history[-1].get_event_attributes()
            tokens.append(Token(name=target_name.get_event_token_name(),
                                data=pickle.dumps(event)))
        return tokens
Example 3
0
 def _post_event_tokens(self):
     """Add some event tokens to the master."""
     # Two jobs x two inputs x two events = eight tokens in total.
     updates = []
     token_name = Name(workflow='some_workflow', instance='12345')
     for job_index in range(2):
         token_name.job = 'some_job_%d' % job_index
         for input_index in range(2):
             token_name.input = 'some_input_%d' % input_index
             for event_index in range(2):
                 token_name.event = 'some_event_%d' % event_index
                 updates.append(
                     Token(name=token_name.get_event_token_name()))
     client = self._factory.get_client()
     client.modify(ModifyRequest(updates=updates))
Example 4
0
 def _post_job_tokens(self):
     """Add some job tokens to the master."""
     # Even-numbered jobs start out WAITING, odd-numbered ones RUNNABLE.
     updates = []
     token_name = Name(workflow='some_workflow', instance='12345')
     for job_index in range(2):
         token_name.job_state = (Name.WAITING_STATE if job_index % 2 == 0
                                 else Name.RUNNABLE_STATE)
         token_name.job = 'some_job_%d' % job_index
         updates.append(Token(name=token_name.get_job_token_name()))
     client = self._factory.get_client()
     client.modify(ModifyRequest(updates=updates))
Example 5
0
 def get_event_names(self, workflow_name, instance, job, input_name):
     """Return names of events under a workflow instance, job, and input."""
     prefix = Name()
     prefix.workflow = workflow_name
     prefix.instance = instance
     prefix.job = job
     prefix.input = input_name
     request = GroupRequest()
     request.namePrefix = prefix.get_input_prefix()
     request.groupSuffix = Name.DELIMITER
     response = self._client.group(request)
     if not response.counts:
         return []
     # Each key in the response is a full event token name; extract just
     # the event component.
     return [Name.from_event_token_name(token_name).event
             for token_name in response.counts.keys()]
Example 6
0
 def get_event_names(self, workflow_name, instance, job, input_name):
     """Return names of events under a workflow instance, job, and input."""
     prefix = Name()
     prefix.workflow = workflow_name
     prefix.instance = instance
     prefix.job = job
     prefix.input = input_name
     request = GroupRequest()
     request.namePrefix = prefix.get_input_prefix()
     request.groupSuffix = Name.DELIMITER
     response = self._client.group(request)
     if not response.counts:
         return []
     # Response keys are full event token names; keep only the event part.
     return [Name.from_event_token_name(token_name).event
             for token_name in response.counts.keys()]
Example 7
0
    def _make_job_runnable(self, job_token):
        """Attempt to make a job runnable.

        Query event tokens in job inputs.  If a combination of triggering
        events exist, remove those events and make the job runnable.
        Otherwise, do nothing.

        Args:
            job_token: The job token to make runnable.
        Returns:
            True if there were no errors during communication with the master,
            otherwise False.
        """
        job = pickle.loads(job_token.data)
        token_name = Name.from_job_token_name(job_token.name)
        # TODO(pawel): handle jobs with no dependencies
        assert job.inputs
        # One query per input: a single token under an input prefix is
        # enough to know that input has fired.
        queries = []
        for job_input in job.inputs:
            input_prefix = Name()
            input_prefix.workflow = token_name.workflow
            input_prefix.instance = token_name.instance
            input_prefix.job = token_name.job
            input_prefix.input = job_input
            query = Query()
            query.namePrefix = input_prefix.get_input_prefix()
            query.maxTokens = 1
            queries.append(query)
        request = QueryRequest(queries=queries)
        try:
            response = self._client.query(request)
        except TokenMasterException:
            # TODO(pawel): add a retry count and fail if a limit is reached.
            LOG.exception('error sending request %s', request)
            return False
        events = Worker._get_triggering_events(response.tokens)
        if not events:
            return True
        return self._move_job_token_to_runnable(job_token, events)
Example 8
0
    def _make_job_runnable(self, job_token):
        """Attempt to make a job runnable.

        Query event tokens in job inputs.  If a combination of triggering
        events exist, remove those events and make the job runnable.
        Otherwise, do nothing.

        Args:
            job_token: The job token to make runnable.
        Returns:
            True if there were no errors during communication with the master,
            otherwise False.
        """
        job = pickle.loads(job_token.data)
        token_name = Name.from_job_token_name(job_token.name)
        # TODO(pawel): handle jobs with no dependencies
        assert job.inputs
        # Build one query per input; a single token under the input prefix
        # suffices to detect that the input has an event.
        queries = []
        for job_input in job.inputs:
            input_prefix = Name()
            input_prefix.workflow = token_name.workflow
            input_prefix.instance = token_name.instance
            input_prefix.job = token_name.job
            input_prefix.input = job_input
            query = Query()
            query.namePrefix = input_prefix.get_input_prefix()
            query.maxTokens = 1
            queries.append(query)
        request = QueryRequest(queries=queries)
        try:
            response = self._client.query(request)
        except TokenMasterException:
            # TODO(pawel): add a retry count and fail if a limit is reached.
            LOG.exception('error sending request %s', request)
            return False
        events = Worker._get_triggering_events(response.tokens)
        if not events:
            return True
        return self._move_job_token_to_runnable(job_token, events)