async def get_index(self, bucket, index, startkey, endkey=None,
                    return_terms=None, max_results=None, continuation=None,
                    timeout=None, term_regex=None):
    req = self._encode_index_req(bucket, index, startkey, endkey,
                                 return_terms, max_results, continuation,
                                 timeout, term_regex, streaming=False)
    msg_code, resp = await self._request(messages.MSG_CODE_INDEX_REQ, req,
                                         messages.MSG_CODE_INDEX_RESP)
    if return_terms and resp.results:
        results = [(decode_index_value(index, pair.key),
                    bytes_to_str(pair.value))
                   for pair in resp.results]
    else:
        results = [bytes_to_str(key) for key in resp.keys]
    if max_results is not None and resp.HasField('continuation'):
        return results, bytes_to_str(resp.continuation)
    else:
        return results, None

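# A hypothetical usage sketch (not part of the client): paging through the
# async get_index above by feeding each returned continuation back in.
# `transport` and the helper name are assumptions for illustration.
async def fetch_all_keys(transport, bucket, index, startkey, endkey=None):
    keys = []
    continuation = None
    while True:
        results, continuation = await transport.get_index(
            bucket, index, startkey, endkey,
            max_results=100, continuation=continuation)
        keys.extend(results)
        # A None continuation means the server has no further pages.
        if continuation is None:
            return keys
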
def get_index(self, bucket, index, startkey, endkey=None,
              return_terms=None, max_results=None, continuation=None,
              timeout=None, term_regex=None):
    """
    Performs a secondary index query.
    """
    if term_regex and not self.index_term_regex():
        raise NotImplementedError("Secondary index term_regex is not "
                                  "supported on %s" %
                                  self.server_version.vstring)

    if timeout == 'infinity':
        timeout = 0

    params = {'return_terms': return_terms, 'max_results': max_results,
              'continuation': continuation, 'timeout': timeout,
              'term_regex': term_regex}
    bucket_type = self._get_bucket_type(bucket.bucket_type)
    url = self.index_path(bucket.name, index, startkey, endkey,
                          bucket_type=bucket_type, **params)
    status, headers, body = self._request('GET', url)
    self.check_http_code(status, [200])
    json_data = json.loads(bytes_to_str(body))
    if return_terms and u'results' in json_data:
        results = []
        for result in json_data[u'results'][:]:
            term, key = list(result.items())[0]
            results.append((decode_index_value(index, term), key))
    else:
        results = json_data[u'keys'][:]
    if max_results and u'continuation' in json_data:
        return (results, json_data[u'continuation'])
    else:
        return (results, None)

def _parse_sibling(self, sibling, headers, data):
    """
    Parses a single sibling out of a response.
    """
    sibling.exists = True

    # Parse the headers...
    for header, value in headers:
        header = header.lower()
        if header == 'content-type':
            sibling.content_type, sibling.charset = \
                self._parse_content_type(value)
        elif header == 'etag':
            sibling.etag = value
        elif header == 'link':
            sibling.links = self._parse_links(value)
        elif header == 'last-modified':
            sibling.last_modified = mktime_tz(parsedate_tz(value))
        elif header.startswith('x-riak-meta-'):
            metakey = header.replace('x-riak-meta-', '')
            sibling.usermeta[metakey] = value
        elif header.startswith('x-riak-index-'):
            field = header.replace('x-riak-index-', '')
            reader = csv.reader([value], skipinitialspace=True)
            for line in reader:
                for token in line:
                    token = decode_index_value(field, token)
                    sibling.add_index(field, token)
        elif header == 'x-riak-deleted':
            sibling.exists = False

    sibling.encoded_data = data

    return sibling

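# Hypothetical illustration (header values are made up): the csv.reader
# pass above is what splits a multi-valued index header such as
# 'x-riak-index-field2_int: 37, 42' into separate terms before decoding.
import csv
for line in csv.reader(['37, 42'], skipinitialspace=True):
    for token in line:
        print(decode_index_value('field2_int', token))  # 37, then 42
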
def get_index(self, bucket, index, startkey, endkey=None,
              return_terms=None, max_results=None, continuation=None):
    if not self.pb_indexes():
        return self._get_index_mapred_emu(bucket, index, startkey, endkey)

    req = self._encode_index_req(bucket, index, startkey, endkey,
                                 return_terms, max_results, continuation)
    msg_code, resp = self._request(MSG_CODE_INDEX_REQ, req,
                                   MSG_CODE_INDEX_RESP)
    if return_terms and resp.results:
        results = [(decode_index_value(index, pair.key), pair.value)
                   for pair in resp.results]
    else:
        results = resp.keys[:]

    if max_results:
        return (results, resp.continuation)
    else:
        return (results, None)

def get_index(self, bucket, index, startkey, endkey=None,
              return_terms=None, max_results=None, continuation=None,
              timeout=None):
    """
    Performs a secondary index query.
    """
    if timeout == 'infinity':
        timeout = 0

    params = {'return_terms': return_terms, 'max_results': max_results,
              'continuation': continuation, 'timeout': timeout}
    url = self.index_path(bucket, index, startkey, endkey, **params)
    status, headers, body = self._request('GET', url)
    self.check_http_code(status, [200])
    json_data = json.loads(body)
    if return_terms and u'results' in json_data:
        results = []
        for result in json_data[u'results'][:]:
            term, key = result.items()[0]
            results.append((decode_index_value(index, term), key))
    else:
        results = json_data[u'keys'][:]
    if max_results and u'continuation' in json_data:
        return (results, json_data[u'continuation'])
    else:
        return (results, None)

def get_index(self, bucket, index, startkey, endkey=None,
              return_terms=None, max_results=None, continuation=None,
              timeout=None, term_regex=None):
    if not self.pb_indexes():
        return self._get_index_mapred_emu(bucket, index, startkey, endkey)
    if term_regex and not self.index_term_regex():
        raise NotImplementedError("Secondary index term_regex is not "
                                  "supported")

    req = self._encode_index_req(bucket, index, startkey, endkey,
                                 return_terms, max_results, continuation,
                                 timeout, term_regex)
    msg_code, resp = self._request(MSG_CODE_INDEX_REQ, req,
                                   MSG_CODE_INDEX_RESP)
    if return_terms and resp.results:
        results = [(decode_index_value(index, pair.key), pair.value)
                   for pair in resp.results]
    else:
        results = resp.keys[:]

    if max_results is not None and resp.HasField('continuation'):
        return (results, resp.continuation)
    else:
        return (results, None)

def get_index(
    self,
    bucket,
    index,
    startkey,
    endkey=None,
    return_terms=None,
    max_results=None,
    continuation=None,
    timeout=None,
    term_regex=None,
):
    if not self.pb_indexes():
        return self._get_index_mapred_emu(bucket, index, startkey, endkey)
    if term_regex and not self.index_term_regex():
        raise NotImplementedError("Secondary index term_regex is not supported")

    req = self._encode_index_req(
        bucket, index, startkey, endkey, return_terms,
        max_results, continuation, timeout, term_regex
    )
    msg_code, resp = self._request(
        riak.pb.messages.MSG_CODE_INDEX_REQ, req,
        riak.pb.messages.MSG_CODE_INDEX_RESP
    )
    if return_terms and resp.results:
        results = [
            (decode_index_value(index, pair.key), bytes_to_str(pair.value))
            for pair in resp.results
        ]
    else:
        results = resp.keys[:]
        if PY3:
            results = [bytes_to_str(key) for key in resp.keys]

    if max_results is not None and resp.HasField("continuation"):
        return (results, bytes_to_str(resp.continuation))
    else:
        return (results, None)

def get_index(self, bucket, index, startkey, endkey=None,
              return_terms=None, max_results=None, continuation=None,
              timeout=None, term_regex=None):
    if not self.pb_indexes():
        return self._get_index_mapred_emu(bucket, index, startkey, endkey)
    if term_regex and not self.index_term_regex():
        raise NotImplementedError("Secondary index term_regex is not "
                                  "supported")

    req = self._encode_index_req(bucket, index, startkey, endkey,
                                 return_terms, max_results, continuation,
                                 timeout, term_regex)
    msg_code, resp = self._request(MSG_CODE_INDEX_REQ, req,
                                   MSG_CODE_INDEX_RESP)
    if return_terms and resp.results:
        results = [(decode_index_value(index, pair.key), pair.value)
                   for pair in resp.results]
    else:
        results = resp.keys[:]

    if max_results:
        return (results, resp.continuation)
    else:
        return (results, None)

def next(self):
    response = super(RiakPbcIndexStream, self).next()

    if response.done and not (response.keys or
                              response.results or
                              response.continuation):
        raise StopIteration

    if self.return_terms and response.results:
        return [(decode_index_value(self.index, r.key), r.value)
                for r in response.results]
    elif response.keys:
        return response.keys[:]
    elif response.continuation:
        return CONTINUATION(response.continuation)

def __next__(self):
    # next() cannot be called on the super() proxy itself (the super
    # object is not an iterator), so invoke the parent's __next__
    # explicitly.
    response = super(RiakPbcIndexStream, self).__next__()

    if response.done and not (response.keys or
                              response.results or
                              response.continuation):
        raise StopIteration

    if self.return_terms and response.results:
        return [(decode_index_value(self.index, r.key), r.value)
                for r in response.results]
    elif response.keys:
        return response.keys[:]
    elif response.continuation:
        return CONTINUATION(response.continuation)

def decode_index_req(self, resp, index,
                     return_terms=None, max_results=None):
    if return_terms and resp.results:
        results = [(decode_index_value(index, pair.key),
                    bytes_to_str(pair.value))
                   for pair in resp.results]
    else:
        results = resp.keys[:]
        if six.PY3:
            results = [bytes_to_str(key) for key in resp.keys]

    if max_results is not None and resp.HasField('continuation'):
        return (results, bytes_to_str(resp.continuation))
    else:
        return (results, None)

def next(self):
    response = super(PbufIndexStream, self).next()

    if response.done and not (response.keys or
                              response.results or
                              response.continuation):
        raise StopIteration

    if self.return_terms and response.results:
        return [(decode_index_value(self.index, r.key),
                 bytes_to_str(r.value))
                for r in response.results]
    elif response.keys:
        if PY2:
            return response.keys[:]
        else:
            return [bytes_to_str(key) for key in response.keys]
    elif response.continuation:
        return CONTINUATION(bytes_to_str(response.continuation))

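# A hypothetical consumption sketch for streams like the one above: each
# iteration yields either a batch of keys/terms or a CONTINUATION wrapper,
# so callers must distinguish the two. Names other than CONTINUATION are
# assumptions; CONTINUATION is assumed to be a distinct wrapper type, as
# the namedtuple in these snippets suggests.
def drain_index_stream(stream):
    results, continuation = [], None
    for chunk in stream:
        if isinstance(chunk, CONTINUATION):
            # Remember the paging marker; it is not a result batch.
            continuation = chunk
        else:
            results.extend(chunk)
    return results, continuation
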
def _decode_content(self, rpb_content, sibling):
    """
    Decodes a single sibling from the protobuf representation into
    a RiakObject.

    :param rpb_content: a single RpbContent message
    :type rpb_content: riak.pb.riak_pb2.RpbContent
    :param sibling: a RiakContent sibling container
    :type sibling: RiakContent
    :rtype: RiakContent
    """
    if rpb_content.HasField("deleted") and rpb_content.deleted:
        sibling.exists = False
    else:
        sibling.exists = True

    if rpb_content.HasField("content_type"):
        sibling.content_type = bytes_to_str(rpb_content.content_type)
    if rpb_content.HasField("charset"):
        sibling.charset = bytes_to_str(rpb_content.charset)
    if rpb_content.HasField("content_encoding"):
        sibling.content_encoding = \
            bytes_to_str(rpb_content.content_encoding)
    if rpb_content.HasField("vtag"):
        sibling.etag = bytes_to_str(rpb_content.vtag)

    sibling.links = [self._decode_link(link)
                     for link in rpb_content.links]
    if rpb_content.HasField("last_mod"):
        sibling.last_modified = float(rpb_content.last_mod)
        if rpb_content.HasField("last_mod_usecs"):
            sibling.last_modified += rpb_content.last_mod_usecs / 1000000.0

    sibling.usermeta = dict([(bytes_to_str(usermd.key),
                              bytes_to_str(usermd.value))
                             for usermd in rpb_content.usermeta])
    sibling.indexes = set([(bytes_to_str(index.key),
                            decode_index_value(index.key, index.value))
                           for index in rpb_content.indexes])
    sibling.encoded_data = rpb_content.value

    return sibling

def decode_content(self, rpb_content, sibling):
    """
    Decodes a single sibling from the protobuf representation into
    a RiakObject.

    :param rpb_content: a single RpbContent message
    :type rpb_content: riak.pb.riak_pb2.RpbContent
    :param sibling: a RiakContent sibling container
    :type sibling: RiakContent
    :rtype: RiakContent
    """
    if rpb_content.HasField("deleted") and rpb_content.deleted:
        sibling.exists = False
    else:
        sibling.exists = True

    if rpb_content.HasField("content_type"):
        sibling.content_type = bytes_to_str(rpb_content.content_type)
    if rpb_content.HasField("charset"):
        sibling.charset = bytes_to_str(rpb_content.charset)
    if rpb_content.HasField("content_encoding"):
        sibling.content_encoding = \
            bytes_to_str(rpb_content.content_encoding)
    if rpb_content.HasField("vtag"):
        sibling.etag = bytes_to_str(rpb_content.vtag)

    sibling.links = [self.decode_link(link)
                     for link in rpb_content.links]
    if rpb_content.HasField("last_mod"):
        sibling.last_modified = float(rpb_content.last_mod)
        if rpb_content.HasField("last_mod_usecs"):
            sibling.last_modified += rpb_content.last_mod_usecs / 1000000.0

    sibling.usermeta = dict([(bytes_to_str(usermd.key),
                              bytes_to_str(usermd.value))
                             for usermd in rpb_content.usermeta])
    sibling.indexes = set([(bytes_to_str(index.key),
                            decode_index_value(index.key, index.value))
                           for index in rpb_content.indexes])
    sibling.encoded_data = rpb_content.value

    return sibling

def get_index(self, bucket, index, startkey, endkey=None,
              return_terms=None, max_results=None, continuation=None):
    """
    Performs a secondary index query.
    """
    params = {'return_terms': return_terms, 'max_results': max_results,
              'continuation': continuation}
    url = self.index_path(bucket, index, startkey, endkey, **params)
    status, headers, body = self._request('GET', url)
    self.check_http_code(status, [200])
    json_data = json.loads(body)
    if return_terms and u'results' in json_data:
        results = []
        for result in json_data[u'results'][:]:
            term, key = result.items()[0]
            results.append((decode_index_value(index, term), key))
    else:
        results = json_data[u'keys'][:]
    if max_results and u'continuation' in json_data:
        return (results, json_data[u'continuation'])
    else:
        return (results, None)

def _decode_pair(self, pair):
    return (decode_index_value(self.index, pair[0]), pair[1])

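# For reference, a minimal sketch of the decode_index_value helper that
# every snippet above calls, assuming the usual Riak 2i naming convention:
# indexes ending in '_int' hold integer terms, '_bin' indexes hold strings.
def decode_index_value(index, value):
    if '_int' in bytes_to_str(index):
        # Integer index terms arrive as bytes/str on the wire.
        return int(value)
    else:
        return bytes_to_str(value)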