def _get_instance_using_cache(self, workflow, instance):
    """Get workflow instance, preferably from the cache.

    As a side effect, if the instance is archived and it does not exist
    in the cache, it will be added to the cache.

    Args:
        workflow: The name of the workflow whose instance we are
            interested in.
        instance: The instance we are interested in.
    Returns:
        The workflow instance or None if it was not found.
    """
    prefix = Name(workflow=workflow,
                  instance=instance).get_instance_prefix()
    cached = self._store.get_cached_data(prefix)
    if cached:
        return pickle.loads(cached)
    # Cache only archived instances.
    if self._store.read_archived_token_names(name_prefix=prefix):
        # The ordering of operations is important.  We need to make sure
        # that the data we add to the cache was constructed from the
        # archived tokens, i.e. built only after the archived check
        # succeeded.
        result = self._get_instance_no_cache(workflow, instance)
        self._store.set_cached_data(prefix, pickle.dumps(result))
        return result
    return self._get_instance_no_cache(workflow, instance)
def _get_instance_using_cache(self, workflow, instance):
    """Get workflow instance, preferably from the cache.

    As a side effect, if the instance is archived and it does not exist
    in the cache, it will be added to the cache.

    Args:
        workflow: The name of the workflow whose instance we are
            interested in.
        instance: The instance we are interested in.
    Returns:
        The workflow instance or None if it was not found.
    """
    instance_prefix = Name(
        workflow=workflow, instance=instance).get_instance_prefix()
    raw = self._store.get_cached_data(instance_prefix)
    if raw:
        # Cache hit: deserialize and return directly.
        return pickle.loads(raw)
    # Cache miss.  We cache only archived instances, so first check
    # whether any archived tokens exist for this prefix.
    archived = self._store.read_archived_token_names(
        name_prefix=instance_prefix)
    instance_data = self._get_instance_no_cache(workflow, instance)
    if archived:
        # The ordering of operations is important: the archived check
        # ran before the instance data was constructed, guaranteeing the
        # cached value reflects the archived tokens.
        self._store.set_cached_data(instance_prefix,
                                    pickle.dumps(instance_data))
    return instance_data
def _read_tokens_from_store(self, store):
    """Read archived job tokens from the store.

    Args:
        store: The store to read tokens from.
    """
    prefix = Name(workflow=self._workflow,
                  instance=self._instance).get_instance_prefix()
    archived_tokens = store.read_archived_tokens(name_prefix=prefix)
    self._filter_job_tokens(archived_tokens)
def _read_tokens_from_store(self, store):
    """Read archived job tokens from the store.

    Args:
        store: The store to read tokens from.
    """
    instance_name = Name(workflow=self._workflow, instance=self._instance)
    self._filter_job_tokens(
        store.read_archived_tokens(
            name_prefix=instance_name.get_instance_prefix()))
def _get_instance_tokens(self):
    """Retrieve all workflow instance tokens.

    Returns:
        List of tokens in the workflow instance.
    """
    name = Name(workflow=self._workflow, instance=self._instance)
    request = QueryRequest(
        queries=[Query(namePrefix=name.get_instance_prefix())])
    try:
        response = self._client.query(request)
    except TokenMasterException:
        LOG.exception("error sending request %s", request)
        return None
    if not response.tokens:
        return None
    # A single query was sent, so exactly one result list comes back.
    assert len(response.tokens) == 1
    return response.tokens[0]