def _insert_token(self, handler):
    """Store one fixed token via the handler and return the stored copy.

    Args:
        handler: The modify handler under test.
    Returns:
        The single token echoed back in the modify response.
    """
    fresh_token = Token(name='/some_other_dir/some_token', data='some data')
    request = ModifyRequest(updates=[fresh_token])
    response = handler.modify(request)
    self.assertEqual(1, len(response.updates))
    return response.updates[0]
def test_recursive(self):
    """Recursive rm queries tokens by prefix and deletes all of them."""
    Options = collections.namedtuple('args', 'recursive force command_args')
    command = Rm()
    command.prepare(Options(recursive=True,
                            force=True,
                            command_args=['/some_path']))
    client = mock.Mock()
    token = Token(version=10,
                  name='/some_path/some_token',
                  owner='some_owner',
                  expirationTime=10,
                  data='some_data')
    client.query.return_value = QueryResponse(tokens=[[token]])
    client.modify.return_value = ModifyResponse()
    output = command.execute(client, None)
    # The command should query by prefix and then delete exactly what the
    # query returned.
    expected_query = QueryRequest(queries=[Query(namePrefix='/some_path')])
    client.query.assert_called_once_with(expected_query)
    client.modify.assert_called_once_with(ModifyRequest(deletes=[token]))
    self.assertEqual('removed 1 token(s)\n', output)
def _post_signal_tokens(self):
    """Add some signal tokens to the master."""
    request = ModifyRequest(updates=[])

    def _queue(signal_obj, token_name):
        # Serialize the signal into a token and queue it for upload.
        signal_token = Token(name=token_name)
        signal_token.data = pickle.dumps(signal_obj)
        request.updates.append(signal_token)

    name = Name(signal='exit')
    _queue(Signal(action=Signal.EXIT), name.get_signal_token_name())

    # The same drain signal is posted at the workflow level and at the
    # instance level.
    drain = Signal(action=Signal.DRAIN)
    name.signal = 'drain'
    name.workflow = 'some_workflow'
    _queue(drain, name.get_signal_token_name())
    name.instance = '123'
    _queue(drain, name.get_signal_token_name())

    name.signal = 'abort'
    _queue(Signal(action=Signal.ABORT), name.get_signal_token_name())

    self._factory.get_client().modify(request)
def test_modity_empty(self):
    """An empty modify request must prepare and commit without raising."""
    # NOTE(review): the method name looks like a typo of "modify"; left
    # unchanged so test discovery is unaffected.
    transaction = ModifyTransaction()
    transaction.prepare(ModifyRequest())
    transaction.commit(self._trie, self._get_blessed_version(), self._store)
def set_action(self, action):
    """Send a signal with a specific action to the master.

    Local signal store gets updated with the new action if it is
    successfully submitted to the master.  If the communication with the
    master fails, locally stored signals get refreshed.

    Args:
        action: The action to set.
    """
    # Each action type carries its own attribute payload.
    if action == Signal.ABORT:
        attributes = {Signal.TIMESTAMP_ATTR: time.time()}
    elif action == Signal.EXIT:
        attributes = {Signal.GENERATION_ATTR: PinballConfig.GENERATION}
    else:
        attributes = {}
    cached = self._signals.get(action)
    if cached and cached.attributes == attributes:
        # Nothing to do: the signal is already set with identical data.
        return
    # A signal with the same action but different data may already exist
    # in the master.
    signal_token = self._get_signal_token(action)
    if not signal_token:
        token_name = Name(workflow=self._workflow,
                          instance=self._instance,
                          signal=Signal.action_to_string(action))
        signal_token = Token(name=token_name.get_signal_token_name())
    new_signal = Signal(action, attributes)
    signal_token.data = pickle.dumps(new_signal)
    if self._send_request(ModifyRequest(updates=[signal_token])):
        self._signals[action] = new_signal
def set_attribute_if_missing(self, action, attribute, value):
    """Set an attribute value unless that attribute is already set.

    Args:
        action: The action whose attribute should be set.
        attribute: The attribute to set.
        value: The attribute value to set.
    Returns:
        True iff the attribute value was set.  Return False if the
        signal is not set.
    """
    if self.get_attribute(action, attribute) is not None:
        return False
    signal_token = self._get_signal_token(action)
    if not signal_token:
        # The signal does not exist in the master at all.
        return False
    signal = pickle.loads(signal_token.data)
    self._signals[action] = signal
    # Re-check against the freshly fetched signal: the master copy may
    # already carry the attribute.
    if self.get_attribute(action, attribute) is not None:
        return False
    signal.attributes[attribute] = value
    signal_token.data = pickle.dumps(signal)
    if not self._send_request(ModifyRequest(updates=[signal_token])):
        return False
    self._signals[action] = signal
    return True
def test_update(self):
    """Updating a versioned token sends a modify request and reports it."""
    Options = collections.namedtuple('args',
                                     'name version owner expiration_time '
                                     'priority data command_args')
    command = Update()
    command.prepare(Options(name='/some_path/some_token',
                            version=10,
                            owner='some_other_owner',
                            expiration_time=100,
                            priority=10,
                            data='some_other_data',
                            command_args=None))
    client = mock.Mock()
    output_token = Token(version=11,
                         name='/some_path/some_token',
                         owner='some_other_owner',
                         expirationTime=100,
                         priority=10,
                         data='some_other_data')
    client.modify.return_value = ModifyResponse(updates=[output_token])
    output = command.execute(client, None)
    # The request must carry the caller-specified version (10), while the
    # response token came back with a bumped version (11).
    input_token = Token(version=10,
                        name='/some_path/some_token',
                        owner='some_other_owner',
                        expirationTime=100,
                        priority=10,
                        data='some_other_data')
    client.modify.assert_called_once_with(
        ModifyRequest(updates=[input_token]))
    self.assertEqual('updated %s\nupdated 1 token\n' % str(output_token),
                     output)
def _update_owned_job_token(self):
    """Update owned job token in the master.

    Returns:
        True if the update was successful, otherwise False.
    """
    assert self._owned_job_token
    request = ModifyRequest(updates=[self._owned_job_token])
    try:
        response = self._client.modify(request)
    except TokenMasterException:
        LOG.exception('error sending request %s', request)
        return False
    assert len(response.updates) == 1
    # Keep the refreshed token (new version) returned by the master.
    self._owned_job_token = response.updates[0]
    return True
def run(self, emailer, store):
    """Build a request that inserts tokens for a new workflow instance.

    Args:
        emailer: Used when checking currently running workflow instances.
        store: Token store consulted for running instances.
    Returns:
        A ModifyRequest inserting the new instance's tokens, or None if
        the workflow cannot be started (too many running instances or an
        unknown workflow).
    """
    if not self._check_workflow_instances(emailer, self.workflow, store):
        # warning() replaces the deprecated warn() alias; same output.
        LOG.warning('too many instances running for workflow %s',
                    self.workflow)
        return None
    config_parser = load_path(PinballConfig.PARSER)(self.parser_params)
    workflow_tokens = config_parser.get_workflow_tokens(self.workflow)
    if not workflow_tokens:
        LOG.error('workflow %s not found', self.workflow)
        return None
    result = ModifyRequest()
    result.updates = workflow_tokens
    assert result.updates
    token = result.updates[0]
    name = Name.from_job_token_name(token.name)
    if not name.instance:
        # The first token may be an event token rather than a job token.
        name = Name.from_event_token_name(token.name)
    LOG.info('exporting workflow %s instance %s. Its tokens are under %s',
             name.workflow, name.instance, name.get_instance_prefix())
    return result
def _post_workflow_start_event_token(self):
    """Post the event that triggers the parent job of the test workflow."""
    name = Name(workflow='some_workflow',
                instance='12345',
                job='parent_job',
                input_name=Name.WORKFLOW_START_INPUT,
                event='workflow_start_event')
    token = Token(name=name.get_event_token_name(),
                  data=pickle.dumps(Event(creator='SimpleWorkflowTest')))
    self._client.modify(ModifyRequest(updates=[token]))
def _keep_job_token_in_runnable(self, job):
    """Keep the owned job token in the runnable group.

    Refresh the job token data field with the provided job object, release
    the ownership of the token, and return it to the runnable group.

    Args:
        job: The job that should be stored in the data field of the job
            token.
    """
    assert self._owned_job_token
    token = self._owned_job_token
    token.data = pickle.dumps(job)
    if job.retry_delay_sec > 0:
        # Delay the retry: hold ownership until the expiration time
        # passes instead of releasing the token immediately.
        token.expirationTime = time.time() + job.retry_delay_sec
    else:
        self._unown(token)
    self._send_request(ModifyRequest(updates=[token]))
def remove_action(self, action):
    """Remove signal with a given action from the master.

    Args:
        action: The action to remove.
    """
    if not self.is_signal_present(action):
        return
    signal_token = self._get_signal_token(action)
    if signal_token:
        self._client.modify(ModifyRequest(deletes=[signal_token]))
    # Drop the local copy even if the master no longer had the token.
    del self._signals[action]
def execute(self, client, store): output = '' tokens = _get_tokens(self._prefix, self._recursive, client) deleted = 0 if not tokens: output += 'no tokens found\n' else: print 'removing:' for token in tokens: print '\t%s' % token.name if self._force or confirm('remove %d tokens' % len(tokens)): request = ModifyRequest(deletes=tokens) client.modify(request) deleted = len(tokens) output += 'removed %d token(s)\n' % deleted return output
def test_modify_deletes(self):
    """Deleting two tokens shrinks the trie by exactly two entries."""
    some_token = copy.copy(self._trie['/some_dir/some_token_0'])
    some_other_token = copy.copy(
        self._trie['/some_dir/some_token_0/some_other_token_0'])
    request = ModifyRequest(deletes=[some_token, some_other_token])
    n_tokens_before = len(self._trie)
    transaction = ModifyTransaction()
    transaction.prepare(request)
    transaction.commit(self._trie, self._get_blessed_version(), self._store)
    # We deleted two things from self._trie.
    self.assertEqual(n_tokens_before - 2, len(self._trie))
    self._check_version_uniqueness()
def test_modify_no_name_change(self):
    """Do not allow changing token names."""
    # pickle gets maximum recursion depth exceeded when traversing
    # the trie, probably a bug in pickle.  Setting the recursion limit
    # to a high number fixes it.
    sys.setrecursionlimit(10000)
    trie_before = pickle.dumps(self._trie)
    renamed = copy.copy(self._trie['/some_dir/some_token_0'])
    renamed.name = '/some_other_dir/some_token_0'
    transaction = ModifyTransaction()
    transaction.prepare(ModifyRequest(updates=[renamed]))
    self.assertRaises(TokenMasterException,
                      transaction.commit,
                      self._trie,
                      self._get_blessed_version(),
                      self._store)
    # The failed commit must leave the trie untouched.
    self.assertEqual(trie_before, pickle.dumps(self._trie))
def execute(self, client, store):
    """Insert or update the token described by the command options.

    Args:
        client: The token master client handling the modification.
        store: Unused.
    Returns:
        A human-readable description of the performed action.
    """
    token = Token(name=self._name,
                  version=self._version,
                  owner=self._owner,
                  expirationTime=self._expiration_time,
                  priority=self._priority,
                  data=self._data)
    response = client.modify(ModifyRequest(updates=[token]))
    assert len(response.updates) == 1
    # A missing version means the token did not exist before this call.
    action = 'inserted' if token.version is None else 'updated'
    output = '%s %s\n' % (action, str(response.updates[0]))
    output += 'updated 1 token\n'
    return output
def test_modify_deletes_and_updates(self):
    """Updates and deletes in a single request."""
    delete_token = copy.copy(self._trie['/some_dir/some_token_0'])
    update_token = copy.copy(self._trie['/some_dir/some_token_1'])
    update_token.data = 'some other data'
    request = ModifyRequest(updates=[update_token], deletes=[delete_token])
    n_tokens_before = len(self._trie)
    transaction = ModifyTransaction()
    transaction.prepare(request)
    response = transaction.commit(self._trie,
                                  self._get_blessed_version(),
                                  self._store)
    # Only the updated token comes back in the response ...
    self.assertEqual(1, len(response.updates))
    # ... while the delete shrinks the trie by one entry.
    self.assertEqual(n_tokens_before - 1, len(self._trie))
    self._check_version_uniqueness()
def _update_tokens(self):
    """Update tokens modified during schedule execution in the master.

    The owned schedule token joins any pending request and the whole
    batch is sent; ownership of the token and the pending request are
    dropped afterwards, even if the call fails.
    """
    assert self._owned_schedule_token
    request = self._request if self._request else ModifyRequest()
    if not request.updates:
        request.updates = []
    request.updates.append(self._owned_schedule_token)
    schedule = pickle.loads(self._owned_schedule_token.data)
    if schedule.workflow == 'experiments':
        LOG.info('updating tokens for workflow experiments %s', request)
    try:
        self._client.modify(request)
    except TokenMasterException:
        LOG.exception('')
    finally:
        self._owned_schedule_token = None
        self._request = None
def _move_job_token_to_runnable(self, job_token, triggering_event_tokens):
    """Move a job token to the runnable branch of the token tree.

    Token tree is the global, hierarchically structured token namespace.

    Args:
        job_token: The job token to make runnable.
        triggering_event_tokens: The list of events used to trigger the
            job.  These events are removed from the master in the same
            call that makes the job token runnable.
    Returns:
        True on success, otherwise False.
    """
    runnable_name = Name.from_job_token_name(job_token.name)
    runnable_name.job_state = Name.RUNNABLE_STATE
    job = pickle.loads(job_token.data)
    Worker._add_events_to_job(job, triggering_event_tokens)
    runnable_token = Token(name=runnable_name.get_job_token_name(),
                           priority=job_token.priority,
                           data=pickle.dumps(job))
    # The waiting job token and the consumed events go away atomically
    # with the insertion of the runnable token.
    deletes = triggering_event_tokens + [job_token]
    return self._send_request(ModifyRequest(updates=[runnable_token],
                                            deletes=deletes))
def _move_job_token_to_waiting(self, job, succeeded):
    """Move the owned job token to the waiting group.

    If the job succeeded, also post events to job outputs.  If the job
    failed or it is the final job (a job with no outputs), post an
    archive signal to finish the workflow.

    Args:
        job: The job that should be stored in the data field of the
            waiting job token.
        succeeded: True if the job succeeded, otherwise False.
    """
    assert self._owned_job_token
    name = Name.from_job_token_name(self._owned_job_token.name)
    name.job_state = Name.WAITING_STATE
    waiting_token = Token(name=name.get_job_token_name(),
                          priority=self._owned_job_token.priority,
                          data=pickle.dumps(job))
    request = ModifyRequest(deletes=[self._owned_job_token],
                            updates=[waiting_token])
    if succeeded:
        request.updates.extend(self._get_output_event_tokens(job))
    if not succeeded or not job.outputs:
        # Either a failed job or the only job in the workflow with no
        # outputs; in both cases the workflow is done, so request
        # archiving unless an archive signal is already pending.
        signaller = Signaller(self._client,
                              workflow=name.workflow,
                              instance=name.instance)
        if not signaller.is_action_set(Signal.ARCHIVE):
            archive_name = Name(
                workflow=name.workflow,
                instance=name.instance,
                signal=Signal.action_to_string(Signal.ARCHIVE))
            archive_token = Token(
                name=archive_name.get_signal_token_name())
            archive_token.data = pickle.dumps(Signal(Signal.ARCHIVE))
            request.updates.append(archive_token)
    self._send_request(request)
def test_modify_updates(self):
    """Updating an existing token and inserting a new one both succeed."""
    existing = copy.copy(self._trie['/some_dir/some_token_0'])
    existing.data = 'some other data'
    fresh = Token(name='/some_other_dir/some_token', data='some data')
    request = ModifyRequest(updates=[existing, fresh])
    n_tokens_before = len(self._trie)
    transaction = ModifyTransaction()
    transaction.prepare(request)
    response = transaction.commit(self._trie,
                                  self._get_blessed_version(),
                                  self._store)
    self.assertEqual(2, len(response.updates))
    # The existing token keeps its name and data but gets a new version.
    self.assertNotEqual(existing.version, response.updates[0].version)
    self.assertEqual(existing.name, response.updates[0].name)
    self.assertEqual(existing.data, response.updates[0].data)
    # The brand-new token gets a positive version.
    self.assertLess(0, response.updates[1].version)
    self.assertEqual(fresh.name, response.updates[1].name)
    self.assertEqual(fresh.data, response.updates[1].data)
    # Only the insertion grows the trie.
    self.assertEqual(n_tokens_before + 1, len(self._trie))
    self._check_version_uniqueness()
def _post_schedule_token(self):
    """Add schedule token to the master."""
    request = ModifyRequest(
        updates=[SchedulerTestCase._get_schedule_token()])
    self._client.modify(request)
def run(self, emailer, store):
    """Produce a request that inserts a single empty token.

    Args:
        emailer: Unused.
        store: Unused.
    Returns:
        A ModifyRequest carrying one fresh Token update.
    """
    request = ModifyRequest()
    request.updates = [Token()]
    return request
def _post_job_tokens(self):
    """Add waiting job tokens to the master."""
    request = ModifyRequest(updates=[self._get_parent_job_token(),
                                     self._get_child_job_token()])
    self._client.modify(request)